var/home/core/zuul-output/logs/kubelet.log
Dec 11 09:21:08 crc systemd[1]: Starting Kubernetes Kubelet... Dec 11 09:21:08 crc restorecon[4697]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 09:21:08 crc restorecon[4697]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:08 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 
crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 
09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 
09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc 
restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 09:21:09 crc restorecon[4697]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 11 09:21:10 crc kubenswrapper[4788]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.287683 4788 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292146 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292172 4788 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292179 4788 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292186 4788 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292192 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292198 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292204 4788 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292211 4788 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292217 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292243 4788 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292252 4788 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292259 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292267 4788 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292274 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292281 4788 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292288 4788 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292296 4788 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292303 4788 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292317 4788 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292323 4788 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292330 4788 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292336 4788 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292343 4788 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292350 4788 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292357 4788 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292363 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292369 4788 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292376 4788 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292382 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292388 4788 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292394 4788 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292400 4788 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292407 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292416 4788 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292422 4788 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292427 4788 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292432 4788 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292437 4788 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292443 4788 feature_gate.go:330] unrecognized 
feature gate: AzureWorkloadIdentity Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292448 4788 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292459 4788 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292467 4788 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292475 4788 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292482 4788 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292491 4788 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292498 4788 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292505 4788 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292511 4788 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292518 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292524 4788 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292531 4788 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292539 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292545 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292551 4788 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292557 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292566 4788 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292574 4788 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292582 4788 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292590 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292597 4788 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292605 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292611 4788 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292617 4788 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292624 4788 feature_gate.go:330] unrecognized 
feature gate: Example Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292631 4788 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292638 4788 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292644 4788 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292650 4788 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292657 4788 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292671 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.292676 4788 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.292988 4788 flags.go:64] FLAG: --address="0.0.0.0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293003 4788 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293018 4788 flags.go:64] FLAG: --anonymous-auth="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293027 4788 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293035 4788 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293042 4788 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293051 4788 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293060 4788 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293067 4788 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293074 4788 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293081 4788 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293087 4788 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293094 4788 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293100 4788 flags.go:64] FLAG: --cgroup-root="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293106 4788 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293112 4788 flags.go:64] FLAG: --client-ca-file="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293118 4788 flags.go:64] FLAG: --cloud-config="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293124 4788 flags.go:64] FLAG: --cloud-provider="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293130 4788 flags.go:64] FLAG: --cluster-dns="[]" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293140 4788 flags.go:64] FLAG: --cluster-domain="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293145 4788 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 
09:21:10.293151 4788 flags.go:64] FLAG: --config-dir="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293157 4788 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293164 4788 flags.go:64] FLAG: --container-log-max-files="5" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293172 4788 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293179 4788 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293186 4788 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293247 4788 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293256 4788 flags.go:64] FLAG: --contention-profiling="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293262 4788 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293268 4788 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293275 4788 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293280 4788 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293289 4788 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293299 4788 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293307 4788 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293313 4788 flags.go:64] FLAG: --enable-load-reader="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293320 4788 flags.go:64] FLAG: --enable-server="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293327 4788 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293334 4788 flags.go:64] FLAG: --event-burst="100" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293341 4788 flags.go:64] FLAG: --event-qps="50" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293347 4788 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293353 4788 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293360 4788 flags.go:64] FLAG: --eviction-hard="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293368 4788 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293374 4788 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293380 4788 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293386 4788 flags.go:64] FLAG: --eviction-soft="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293392 4788 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293398 4788 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293406 4788 flags.go:64] FLAG: 
--experimental-allocatable-ignore-eviction="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293413 4788 flags.go:64] FLAG: --experimental-mounter-path="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293511 4788 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293520 4788 flags.go:64] FLAG: --fail-swap-on="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293527 4788 flags.go:64] FLAG: --feature-gates="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293537 4788 flags.go:64] FLAG: --file-check-frequency="20s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293550 4788 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293567 4788 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293576 4788 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293584 4788 flags.go:64] FLAG: --healthz-port="10248" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293592 4788 flags.go:64] FLAG: --help="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293598 4788 flags.go:64] FLAG: --hostname-override="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293604 4788 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293611 4788 flags.go:64] FLAG: --http-check-frequency="20s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293618 4788 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293624 4788 flags.go:64] FLAG: --image-credential-provider-config="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293629 4788 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293636 4788 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293642 4788 flags.go:64] FLAG: --image-service-endpoint="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293648 4788 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293662 4788 flags.go:64] FLAG: --kube-api-burst="100" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293668 4788 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293676 4788 flags.go:64] FLAG: --kube-api-qps="50" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293682 4788 flags.go:64] FLAG: --kube-reserved="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293688 4788 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293694 4788 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293701 4788 flags.go:64] FLAG: --kubelet-cgroups="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293707 4788 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293713 4788 flags.go:64] FLAG: --lock-file="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293718 4788 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293725 4788 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 11 
09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293731 4788 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293743 4788 flags.go:64] FLAG: --log-json-split-stream="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293749 4788 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293755 4788 flags.go:64] FLAG: --log-text-split-stream="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293761 4788 flags.go:64] FLAG: --logging-format="text" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293767 4788 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293774 4788 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293780 4788 flags.go:64] FLAG: --manifest-url="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293788 4788 flags.go:64] FLAG: --manifest-url-header="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293797 4788 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293804 4788 flags.go:64] FLAG: --max-open-files="1000000" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293812 4788 flags.go:64] FLAG: --max-pods="110" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293818 4788 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293824 4788 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293831 4788 flags.go:64] FLAG: --memory-manager-policy="None" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293836 4788 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293842 4788 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293849 4788 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293855 4788 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293873 4788 flags.go:64] FLAG: --node-status-max-images="50" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293880 4788 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293886 4788 flags.go:64] FLAG: --oom-score-adj="-999" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293892 4788 flags.go:64] FLAG: --pod-cidr="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293898 4788 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293909 4788 flags.go:64] FLAG: --pod-manifest-path="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293921 4788 flags.go:64] FLAG: --pod-max-pids="-1" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293928 4788 flags.go:64] FLAG: --pods-per-core="0" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293935 4788 flags.go:64] FLAG: --port="10250" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293941 4788 flags.go:64] FLAG: 
--protect-kernel-defaults="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293948 4788 flags.go:64] FLAG: --provider-id="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293954 4788 flags.go:64] FLAG: --qos-reserved="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293959 4788 flags.go:64] FLAG: --read-only-port="10255" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293966 4788 flags.go:64] FLAG: --register-node="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293972 4788 flags.go:64] FLAG: --register-schedulable="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.293986 4788 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294000 4788 flags.go:64] FLAG: --registry-burst="10" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294006 4788 flags.go:64] FLAG: --registry-qps="5" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294012 4788 flags.go:64] FLAG: --reserved-cpus="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294018 4788 flags.go:64] FLAG: --reserved-memory="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294027 4788 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294033 4788 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294039 4788 flags.go:64] FLAG: --rotate-certificates="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294046 4788 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294052 4788 flags.go:64] FLAG: --runonce="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294058 4788 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294064 4788 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294070 4788 flags.go:64] FLAG: --seccomp-default="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294077 4788 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294084 4788 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294090 4788 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294097 4788 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294103 4788 flags.go:64] FLAG: --storage-driver-password="root" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294110 4788 flags.go:64] FLAG: --storage-driver-secure="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294116 4788 flags.go:64] FLAG: --storage-driver-table="stats" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294123 4788 flags.go:64] FLAG: --storage-driver-user="root" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294130 4788 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294136 4788 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294143 4788 flags.go:64] FLAG: --system-cgroups="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294150 4788 flags.go:64] FLAG: 
--system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294161 4788 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294168 4788 flags.go:64] FLAG: --tls-cert-file="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294174 4788 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294183 4788 flags.go:64] FLAG: --tls-min-version="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294189 4788 flags.go:64] FLAG: --tls-private-key-file="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294195 4788 flags.go:64] FLAG: --topology-manager-policy="none" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294201 4788 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294209 4788 flags.go:64] FLAG: --topology-manager-scope="container" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294216 4788 flags.go:64] FLAG: --v="2" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294247 4788 flags.go:64] FLAG: --version="false" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294257 4788 flags.go:64] FLAG: --vmodule="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294265 4788 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.294272 4788 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294450 4788 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294457 4788 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294463 4788 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294468 4788 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294473 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294479 4788 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294484 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294489 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294494 4788 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294499 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294505 4788 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294510 4788 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294516 4788 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294521 4788 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294526 4788 feature_gate.go:330] unrecognized 
feature gate: AdditionalRoutingCapabilities Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294532 4788 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294537 4788 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294542 4788 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294548 4788 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294554 4788 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294559 4788 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294566 4788 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294574 4788 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294580 4788 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294585 4788 feature_gate.go:330] unrecognized feature gate: Example Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294592 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294600 4788 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294605 4788 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294611 4788 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294616 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294621 4788 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294626 4788 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294631 4788 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294637 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294642 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294647 4788 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294653 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294658 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294663 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294668 4788 feature_gate.go:330] 
unrecognized feature gate: ClusterMonitoringConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294673 4788 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294678 4788 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294685 4788 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294692 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294697 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294702 4788 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294707 4788 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294712 4788 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294717 4788 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294722 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294728 4788 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294734 4788 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294740 4788 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294752 4788 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294759 4788 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294767 4788 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294774 4788 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294780 4788 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294787 4788 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294794 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294799 4788 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294813 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294818 4788 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294823 4788 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294830 4788 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294835 4788 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294840 4788 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294845 4788 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294851 4788 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294856 4788 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.294862 4788 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.295059 4788 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.306157 4788 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.306193 4788 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306268 4788 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306278 4788 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306283 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306290 4788 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306297 4788 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306303 4788 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306309 4788 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306314 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306319 4788 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306323 4788 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306328 4788 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306331 4788 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306335 4788 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306338 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306342 4788 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306347 4788 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306353 4788 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306358 4788 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306363 4788 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306368 4788 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306373 4788 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306377 4788 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306382 4788 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306386 4788 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306390 4788 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306395 4788 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306400 4788 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306404 4788 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306408 4788 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306412 4788 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306416 4788 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306419 4788 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306423 4788 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306427 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306431 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306435 4788 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306438 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306442 4788 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306446 4788 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306450 4788 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306454 4788 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306458 4788 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306461 4788 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306465 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306468 4788 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306472 4788 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306475 4788 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306479 4788 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306483 4788 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306486 4788 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306490 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306495 4788 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306500 4788 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306505 4788 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306510 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306515 4788 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306519 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306524 4788 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306528 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306533 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306537 4788 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306542 4788 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306546 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306551 4788 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306556 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306561 4788 feature_gate.go:330] unrecognized feature gate: Example Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306565 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306570 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306574 4788 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306578 4788 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306583 4788 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.306595 4788 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306760 4788 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306770 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306777 4788 
feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306782 4788 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306788 4788 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306794 4788 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306800 4788 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306805 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306812 4788 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306818 4788 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306823 4788 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306828 4788 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306834 4788 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306838 4788 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306843 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306848 4788 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306854 4788 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306859 4788 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306863 4788 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306868 4788 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306872 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306877 4788 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306881 4788 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306888 4788 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306894 4788 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306898 4788 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306903 4788 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306908 4788 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306914 4788 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306919 4788 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306924 4788 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306930 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306937 4788 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306942 4788 feature_gate.go:330] unrecognized feature gate: Example Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306947 4788 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306952 4788 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306957 4788 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306961 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306966 4788 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306970 4788 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306974 4788 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306979 4788 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306983 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306988 4788 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306993 4788 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.306997 4788 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307002 4788 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307006 4788 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307011 4788 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307015 4788 feature_gate.go:330] unrecognized feature gate: 
IngressControllerLBSubnetsAWS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307019 4788 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307025 4788 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307031 4788 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307035 4788 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307040 4788 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307044 4788 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307048 4788 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307052 4788 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307056 4788 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307060 4788 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307065 4788 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307070 4788 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307075 4788 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307079 4788 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307084 4788 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307088 4788 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307091 4788 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307096 4788 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307099 4788 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307103 4788 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.307107 4788 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.307113 4788 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.307575 4788 server.go:940] "Client rotation is on, will bootstrap in background" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.310771 4788 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.310861 4788 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.311669 4788 server.go:997] "Starting client certificate rotation" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.311698 4788 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.311931 4788 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-20 22:46:01.464537404 +0000 UTC Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.312068 4788 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 229h24m51.152474591s for next certificate rotation Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.332399 4788 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.338578 4788 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.353718 4788 log.go:25] "Validated CRI v1 runtime API" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.389408 4788 log.go:25] "Validated CRI v1 image API" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.391920 4788 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.399522 4788 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-11-09-16-43-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.399619 4788 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.420536 4788 manager.go:217] Machine: {Timestamp:2025-12-11 09:21:10.418819454 +0000 UTC m=+0.489599060 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:14f7cb2d-755c-4224-b9c8-56700be1f839 BootID:47dfd2f0-3ae8-470e-8291-4c306acc36e8 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:8c:65:2c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:8c:65:2c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:12:e0:c2 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ad:00:2d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:21:83:e1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:f6:f3:ef Speed:-1 Mtu:1496} {Name:eth10 MacAddress:6e:10:4b:c6:ff:fa Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:c2:b9:c4:7e:7f:c4 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.420800 4788 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.420973 4788 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.422587 4788 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.422854 4788 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.422910 4788 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.423202 4788 topology_manager.go:138] "Creating topology manager with none policy" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.423217 4788 container_manager_linux.go:303] "Creating device plugin manager" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.423467 4788 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.423505 4788 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 
09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.423962 4788 state_mem.go:36] "Initialized new in-memory state store" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.424074 4788 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.424942 4788 kubelet.go:418] "Attempting to sync node with API server" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.424969 4788 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.424998 4788 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.425014 4788 kubelet.go:324] "Adding apiserver pod source" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.425028 4788 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.429404 4788 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.430350 4788 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.432495 4788 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.434286 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.434456 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.434402 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.434549 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434625 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434662 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434672 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434682 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 11 09:21:10 crc kubenswrapper[4788]: 
I1211 09:21:10.434699 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434709 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434719 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434733 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434746 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434757 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434797 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.434808 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.435101 4788 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.435925 4788 server.go:1280] "Started kubelet" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.436304 4788 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.436308 4788 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.436836 4788 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.437267 4788 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438338 4788 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 11 09:21:10 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438384 4788 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438472 4788 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 05:46:30.432689981 +0000 UTC Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438505 4788 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 332h25m19.994186422s for next certificate rotation Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438615 4788 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438638 4788 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.438794 4788 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.439189 4788 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.439561 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="200ms" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.439930 4788 factory.go:55] Registering systemd factory Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440004 4788 factory.go:221] Registration of the systemd container factory successfully Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440426 4788 factory.go:153] Registering CRI-O factory Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440463 4788 factory.go:221] Registration of the crio container factory successfully Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440550 4788 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440588 4788 factory.go:103] Registering Raw factory Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.440610 4788 manager.go:1196] Started watching for new ooms in manager Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.440247 4788 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.238:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18801ec184dd6b81 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 09:21:10.435859329 +0000 UTC m=+0.506638935,LastTimestamp:2025-12-11 09:21:10.435859329 +0000 UTC m=+0.506638935,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.441193 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.442211 4788 manager.go:319] Starting recovery of all containers Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.442572 4788 server.go:460] "Adding debug handlers to kubelet server" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.441857 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.451338 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.451407 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.451426 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.451441 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452159 4788 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452207 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452274 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452302 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452325 4788 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452349 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452370 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452392 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452411 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452428 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452448 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452465 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452482 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452499 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452516 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452537 4788 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452579 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452598 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452628 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452650 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452670 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452694 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452717 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452767 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452792 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452808 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452825 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452843 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452861 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452878 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452892 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452908 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452924 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452941 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452959 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452975 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.452989 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453005 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453023 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453041 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453057 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453077 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453093 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453141 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453160 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453179 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453199 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453218 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453258 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453282 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453299 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453317 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453336 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453354 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453371 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453387 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453401 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453416 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453430 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453447 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453463 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453480 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453495 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453512 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453527 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453542 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453558 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453572 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453588 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453608 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453629 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453650 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453674 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453695 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453712 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453736 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453758 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453774 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453790 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453804 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453818 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453831 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453847 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453861 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453877 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453891 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453905 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453919 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453932 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453947 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453960 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453973 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.453986 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454002 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454016 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454029 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454042 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454058 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454074 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454089 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454105 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454128 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454178 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454197 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454215 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454255 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454273 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454291 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454309 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454326 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454343 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454359 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454374 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454390 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454407 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454423 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454461 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454476 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454492 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454507 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454522 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454536 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454552 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454569 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454642 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454664 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454685 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454704 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454726 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454742 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454759 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454775 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454791 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454806 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454821 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454835 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454851 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454867 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454885 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454902 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454918 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454936 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454951 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454967 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.454983 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455000 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455016 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455032 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455048 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455066 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455085 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455101 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455117 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455133 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455149 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455165 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455179 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455194 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455211 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455248 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455265 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455284 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455300 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455316 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455338 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455360 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455379 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455400 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455453 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455475 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455497 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455527 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455600 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455624 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455644 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455664 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455685 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455712 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455741 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455764 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455784 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" 
seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455804 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455820 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455839 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455854 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455870 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455888 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455903 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455959 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455977 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.455993 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456013 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" 
seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456029 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456046 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456063 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456078 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456096 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456114 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456129 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456146 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456160 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456177 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456194 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 11 
09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456211 4788 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456225 4788 reconstruct.go:97] "Volume reconstruction finished" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.456256 4788 reconciler.go:26] "Reconciler: start to sync state" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.466083 4788 manager.go:324] Recovery completed Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.477214 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.478924 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.478965 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.478979 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.480102 4788 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.480119 4788 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.480157 4788 state_mem.go:36] "Initialized new in-memory state store" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.491824 4788 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.494361 4788 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.494409 4788 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.494437 4788 kubelet.go:2335] "Starting kubelet main sync loop" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.494491 4788 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.496584 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.496641 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.497032 4788 policy_none.go:49] "None policy: Start" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.497845 4788 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.497873 4788 state_mem.go:35] "Initializing new in-memory state store" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.539284 4788 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.562937 4788 manager.go:334] "Starting Device Plugin manager" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.563017 4788 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.563031 4788 server.go:79] "Starting device plugin registration server" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.563677 4788 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.563700 4788 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.563887 4788 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.564042 4788 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.564056 4788 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.571826 4788 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.594975 4788 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 11 09:21:10 crc kubenswrapper[4788]: 
I1211 09:21:10.595138 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596277 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596347 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596359 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596577 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596832 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.596889 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.598774 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.598835 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.598851 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.600677 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.600709 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.600771 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.601652 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.601782 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.601836 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.603879 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.603933 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.603956 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.603970 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.603985 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.604006 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.604983 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.605299 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.605393 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606516 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606549 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606559 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606631 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606662 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606675 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606777 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606935 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.606982 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607852 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607887 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607903 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607939 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607956 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.607967 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.608101 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.608127 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.609305 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.609341 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.609353 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.640183 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="400ms" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659288 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659431 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659491 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") 
pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659578 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659618 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659643 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659721 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659787 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659828 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659856 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659875 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659904 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 
09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659936 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659965 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.659994 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.664506 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.665806 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.665856 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.665870 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.665903 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.666627 4788 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.238:6443: connect: connection refused" node="crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761451 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761521 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761548 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761571 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" 
(UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761591 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761611 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761628 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761651 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761689 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761712 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761713 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761762 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761788 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761718 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761813 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761642 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761856 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761868 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761769 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761847 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761722 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761733 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.761992 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762015 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762040 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762056 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762081 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762062 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762113 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.762139 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.867243 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.868729 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.868793 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.868811 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.868858 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:10 crc kubenswrapper[4788]: E1211 09:21:10.869362 4788 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.238:6443: connect: connection refused" node="crc" Dec 11 09:21:10 crc 
kubenswrapper[4788]: I1211 09:21:10.940093 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.947845 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.966934 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.985635 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.990721 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-8c8741e38bcc0d7b01db2764be768e4bf966e18aff89a40a0c3e10256db056df WatchSource:0}: Error finding container 8c8741e38bcc0d7b01db2764be768e4bf966e18aff89a40a0c3e10256db056df: Status 404 returned error can't find the container with id 8c8741e38bcc0d7b01db2764be768e4bf966e18aff89a40a0c3e10256db056df Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.991889 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3407c8debf25a9ae2d10a9592eced8488d1554b8ce309a055ef10a5a0409611d WatchSource:0}: Error finding container 3407c8debf25a9ae2d10a9592eced8488d1554b8ce309a055ef10a5a0409611d: Status 404 returned error can't find the container with id 3407c8debf25a9ae2d10a9592eced8488d1554b8ce309a055ef10a5a0409611d Dec 11 09:21:10 crc kubenswrapper[4788]: W1211 09:21:10.992590 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-11521fce91aca2b0849486456b13352cb46ca682a0fbcf2d1a19316362f52b5b WatchSource:0}: Error finding container 11521fce91aca2b0849486456b13352cb46ca682a0fbcf2d1a19316362f52b5b: Status 404 returned error can't find the container with id 11521fce91aca2b0849486456b13352cb46ca682a0fbcf2d1a19316362f52b5b Dec 11 09:21:10 crc kubenswrapper[4788]: I1211 09:21:10.994201 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.000585 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-6a6c7ba24ff1e0c546e7be04074abcbc8b8bf676ce6ad7ab289b8da8322b1e10 WatchSource:0}: Error finding container 6a6c7ba24ff1e0c546e7be04074abcbc8b8bf676ce6ad7ab289b8da8322b1e10: Status 404 returned error can't find the container with id 6a6c7ba24ff1e0c546e7be04074abcbc8b8bf676ce6ad7ab289b8da8322b1e10 Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.011340 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-75aa6192e2fde91c8a9cb2afe5109df58b1878eba9a3787166c4c9c27c74ed22 WatchSource:0}: Error finding container 75aa6192e2fde91c8a9cb2afe5109df58b1878eba9a3787166c4c9c27c74ed22: Status 404 returned error can't find the container with id 75aa6192e2fde91c8a9cb2afe5109df58b1878eba9a3787166c4c9c27c74ed22 Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.041741 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="800ms" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.269751 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.271170 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.271247 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.271263 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.271300 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.271822 4788 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.238:6443: connect: connection refused" node="crc" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.437837 4788 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.466563 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.467082 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": 
dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.497034 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.497146 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.500845 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5" exitCode=0 Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.500928 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.501043 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6a6c7ba24ff1e0c546e7be04074abcbc8b8bf676ce6ad7ab289b8da8322b1e10"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.501192 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.502961 4788 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="bf00ad3709ae5e3d4823a8a9dc2761010e3ac9032de9ee4d7f7a939c12fb8ca1" exitCode=0 Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503037 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"bf00ad3709ae5e3d4823a8a9dc2761010e3ac9032de9ee4d7f7a939c12fb8ca1"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503077 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"11521fce91aca2b0849486456b13352cb46ca682a0fbcf2d1a19316362f52b5b"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503215 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503301 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503336 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.503352 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.504155 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 
09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.504190 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.504200 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.505134 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.505444 4788 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4c829fd2d4eb9cca4a6014e7f0ebf13300fa41c03fb647b15d59192a934f9131" exitCode=0 Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.505508 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4c829fd2d4eb9cca4a6014e7f0ebf13300fa41c03fb647b15d59192a934f9131"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.505524 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3407c8debf25a9ae2d10a9592eced8488d1554b8ce309a055ef10a5a0409611d"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.505590 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506067 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506088 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506098 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506534 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506560 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.506569 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.508345 4788 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d4f5016f65cb8f21dccb05389dd1a8f8c8d71ec9c791b653f1ecdd6a8c5034ce" exitCode=0 Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.508404 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d4f5016f65cb8f21dccb05389dd1a8f8c8d71ec9c791b653f1ecdd6a8c5034ce"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.508424 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8c8741e38bcc0d7b01db2764be768e4bf966e18aff89a40a0c3e10256db056df"} Dec 11 09:21:11 crc kubenswrapper[4788]: 
I1211 09:21:11.508542 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.509276 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.509308 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.509319 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.509324 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.509386 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.510321 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22"} Dec 11 09:21:11 crc kubenswrapper[4788]: I1211 09:21:11.510345 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"75aa6192e2fde91c8a9cb2afe5109df58b1878eba9a3787166c4c9c27c74ed22"} Dec 11 09:21:11 crc kubenswrapper[4788]: W1211 09:21:11.797192 4788 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.797602 4788 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.238:6443: connect: connection refused" logger="UnhandledError" Dec 11 09:21:11 crc kubenswrapper[4788]: E1211 09:21:11.843808 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="1.6s" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.072316 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.073692 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.073776 4788 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.073842 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.073893 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:12 crc kubenswrapper[4788]: E1211 09:21:12.074639 4788 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.238:6443: connect: connection refused" node="crc" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.438336 4788 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.238:6443: connect: connection refused Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.515738 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.515805 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.515820 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.515768 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.517102 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.517132 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.517145 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.519372 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.519418 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.519429 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.520694 4788 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4e58b5563c70913ca7f5346bba9caf3cc352342246023ae0eae74846ab098bcc" exitCode=0 Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.520759 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4e58b5563c70913ca7f5346bba9caf3cc352342246023ae0eae74846ab098bcc"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.520891 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.522157 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.522179 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.522188 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.524444 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b085376c9f6b2ce289dd52a6abdd00c57b9b5fecd430877f09e74076f5c7f8bc"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.524520 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.526053 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.526078 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.526087 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.529928 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c66615b0a8ee71222e3f27d11b24bb2b4ae8d6de9b4975d29a76c81d6890a6e2"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.529971 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f862ef259d84a17a106e2c233746b9b32fbeef61e52a2cba89d18b64ddac41d1"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.529983 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2750925b5e06a7b0916563fdadca324df442b480c46d688c265861bb8453ad0d"} Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.530071 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:12 crc 
kubenswrapper[4788]: I1211 09:21:12.530804 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.530833 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:12 crc kubenswrapper[4788]: I1211 09:21:12.530845 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.534837 4788 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="599c2f8d339ffcae98cfcdff1da60a31f4586ac3cd1cfd355b1ab3e58b5d9e1e" exitCode=0 Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.534963 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"599c2f8d339ffcae98cfcdff1da60a31f4586ac3cd1cfd355b1ab3e58b5d9e1e"} Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.535189 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.539880 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.539970 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.540010 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.545865 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638"} Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.545949 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32"} Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.546070 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.546212 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550408 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550454 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550469 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550469 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550508 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 
09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.550518 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.599757 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.606041 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.674892 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.676614 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.676658 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.676673 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.676697 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:13 crc kubenswrapper[4788]: I1211 09:21:13.724104 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.316563 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553282 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6a1b38a300fb11ef4c0922420ddaf1dc0e4d53eeed5f181775d096967c18fdcd"} Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553340 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0242fbbe0ab0b67d667c364a814fd1cbc645387d4cfd97233a126a16df6a21ed"} Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553358 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f0151c96a06824ddd959c65e3f9a8f2435b4f7b134adb597f6b5ac5b7c5dce9b"} Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553373 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f58b402b6c4cdcb73071b4f725e4b9b64e4b129be1c9eff1d1524903703b6050"} Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553383 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553402 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.553480 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554281 4788 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554316 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554328 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554427 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554498 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.554511 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:14 crc kubenswrapper[4788]: I1211 09:21:14.927949 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.561946 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4b577e526220694a80e70fc95603432fc156352ce3c2380849130c53a6824336"} Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.562066 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.562079 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.562200 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563508 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563540 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563570 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563586 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563568 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563648 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563847 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563878 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:15 crc kubenswrapper[4788]: I1211 09:21:15.563902 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 
09:21:16.564730 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.564837 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.566396 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.566474 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.566493 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.567552 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.567611 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.567624 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.888561 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.888973 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.890840 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.890912 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:16 crc kubenswrapper[4788]: I1211 09:21:16.890924 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:17 crc kubenswrapper[4788]: I1211 09:21:17.468346 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:17 crc kubenswrapper[4788]: I1211 09:21:17.567327 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:17 crc kubenswrapper[4788]: I1211 09:21:17.568275 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:17 crc kubenswrapper[4788]: I1211 09:21:17.568317 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:17 crc kubenswrapper[4788]: I1211 09:21:17.568332 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.185869 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.186131 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.187588 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.187666 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.187683 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.889055 4788 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 09:21:19 crc kubenswrapper[4788]: I1211 09:21:19.889264 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 11 09:21:20 crc kubenswrapper[4788]: E1211 09:21:20.571990 4788 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 09:21:21 crc kubenswrapper[4788]: I1211 09:21:21.726353 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:21 crc kubenswrapper[4788]: I1211 09:21:21.726524 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:21 crc kubenswrapper[4788]: I1211 09:21:21.728085 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:21 crc kubenswrapper[4788]: I1211 09:21:21.728135 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:21 crc kubenswrapper[4788]: I1211 09:21:21.728145 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:22 crc kubenswrapper[4788]: I1211 09:21:22.256188 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:21:22 crc kubenswrapper[4788]: I1211 09:21:22.256435 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:22 crc kubenswrapper[4788]: I1211 09:21:22.260211 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:22 crc kubenswrapper[4788]: I1211 09:21:22.260712 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:22 crc kubenswrapper[4788]: I1211 09:21:22.260721 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:23 crc kubenswrapper[4788]: I1211 09:21:23.439184 4788 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 11 09:21:23 crc kubenswrapper[4788]: E1211 09:21:23.445667 4788 controller.go:145] "Failed to ensure lease exists, will retry" 
err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 11 09:21:23 crc kubenswrapper[4788]: I1211 09:21:23.480980 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 09:21:23 crc kubenswrapper[4788]: I1211 09:21:23.481068 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 09:21:23 crc kubenswrapper[4788]: I1211 09:21:23.485532 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 09:21:23 crc kubenswrapper[4788]: I1211 09:21:23.485610 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.937281 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.937506 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.938980 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.939056 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.939071 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:24 crc kubenswrapper[4788]: I1211 09:21:24.942767 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.243538 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.243868 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.245529 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.245597 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:25 crc 
kubenswrapper[4788]: I1211 09:21:25.245612 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.273852 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.587102 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.587167 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.587127 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.588287 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.588325 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.588340 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.589366 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.589401 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.589413 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:25 crc kubenswrapper[4788]: I1211 09:21:25.601248 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 11 09:21:26 crc kubenswrapper[4788]: I1211 09:21:26.589493 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:26 crc kubenswrapper[4788]: I1211 09:21:26.590680 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:26 crc kubenswrapper[4788]: I1211 09:21:26.590721 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:26 crc kubenswrapper[4788]: I1211 09:21:26.590736 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.467010 4788 trace.go:236] Trace[1095365903]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 09:21:13.727) (total time: 14739ms): Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[1095365903]: ---"Objects listed" error: 14739ms (09:21:28.466) Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[1095365903]: [14.739822855s] [14.739822855s] END Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.467056 4788 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.467608 4788 trace.go:236] Trace[380929108]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 09:21:14.055) (total time: 14411ms): Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[380929108]: 
---"Objects listed" error: 14411ms (09:21:28.467) Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[380929108]: [14.411955527s] [14.411955527s] END Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.467696 4788 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.471779 4788 trace.go:236] Trace[636756478]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 09:21:14.282) (total time: 14189ms): Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[636756478]: ---"Objects listed" error: 14188ms (09:21:28.471) Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[636756478]: [14.189097947s] [14.189097947s] END Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.471817 4788 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.474832 4788 trace.go:236] Trace[281828297]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 09:21:14.286) (total time: 14188ms): Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[281828297]: ---"Objects listed" error: 14188ms (09:21:28.474) Dec 11 09:21:28 crc kubenswrapper[4788]: Trace[281828297]: [14.188599238s] [14.188599238s] END Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.474864 4788 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.477620 4788 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 11 09:21:28 crc kubenswrapper[4788]: E1211 09:21:28.479387 4788 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.524484 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54570->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.524588 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54570->192.168.126.11:17697: read: connection reset by peer" Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.525082 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.525132 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.796556 4788 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:28 crc kubenswrapper[4788]: I1211 09:21:28.809937 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.435612 4788 apiserver.go:52] "Watching apiserver" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.443637 4788 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.444099 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.444643 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.444724 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.444890 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.445046 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.445044 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.445188 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.445277 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.445377 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.445451 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.447800 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.447848 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.447864 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.447881 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.448553 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.448740 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.448843 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.448901 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.448968 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.474136 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.489580 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.500794 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.511434 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.521986 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.532807 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.542992 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.552487 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.563620 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.630181 4788 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.634609 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.637558 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638" exitCode=255 Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.637619 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638"} Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.657316 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.657683 4788 scope.go:117] "RemoveContainer" containerID="ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.662593 4788 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.666900 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.682873 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.696319 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.716175 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.719941 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-l5m4p"] Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.720427 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.725691 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.725717 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.726732 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-9wzpd"] Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.727299 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730292 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730330 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730356 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730380 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730402 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730420 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730545 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730564 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730582 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730605 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730631 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730651 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730667 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730689 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730712 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730773 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730827 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730842 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730859 4788 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730877 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730897 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730918 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730945 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.730968 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731025 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731045 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731066 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731104 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: 
\"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731124 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731147 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731168 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731190 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731212 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731257 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731276 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731296 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731315 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731334 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731353 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731378 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731407 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731427 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731448 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731467 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731488 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731509 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731528 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731548 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731571 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731596 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731623 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.731653 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.733167 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.733213 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.733680 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.733963 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.733955 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.734210 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.734246 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.734319 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.734640 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735024 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735324 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735593 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735673 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735710 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735750 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735834 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735870 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735893 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.735903 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736006 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736035 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736051 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736097 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736146 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736151 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736179 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736586 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.736881 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737086 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.737318 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:30.237284711 +0000 UTC m=+20.308064297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737340 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737414 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737460 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737495 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737618 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738156 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738564 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.740569 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737412 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737435 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737848 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737904 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738031 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738392 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738410 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738497 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738599 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.738726 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.739096 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.739713 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.740122 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.740207 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.741009 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.741297 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.741588 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.741920 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.741970 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.742170 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.742466 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.745508 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.742851 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.742947 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.743104 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.737033 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.743992 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746013 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746451 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746481 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746549 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746740 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746789 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.746860 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.747096 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.747711 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.747614 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.747768 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.748080 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.748120 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.748952 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749060 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749222 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749353 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749403 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749430 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749432 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749476 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749503 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749542 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749570 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749568 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749583 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749612 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749635 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749661 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.750054 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.752577 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.753721 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.754329 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.754721 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749881 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.754889 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.755467 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.756674 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.756812 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.759558 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.760608 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.768171 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.768646 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.785805 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.786171 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.786843 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.749704 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787476 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787521 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787549 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787585 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787606 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787628 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787664 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787683 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787701 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787739 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787756 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787772 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787788 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787823 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787839 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" 
(UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787855 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787896 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787915 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787930 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787964 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.787982 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788003 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788019 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788054 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788083 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788127 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788152 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788178 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788217 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788255 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788270 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788291 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788321 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788338 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788354 4788 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788401 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788420 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788441 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788475 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788494 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788521 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788582 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788611 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788650 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788669 4788 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788689 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788729 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788747 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788766 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788800 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788823 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788845 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788887 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788907 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788924 
4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.788957 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789002 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789035 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789052 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789069 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789084 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789117 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789133 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.789149 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 09:21:29 
crc kubenswrapper[4788]: I1211 09:21:29.789535 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.790896 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.791211 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.791578 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.791910 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.792139 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.792812 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.793182 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.793454 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.793833 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.794425 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.794866 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795204 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795483 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795515 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795694 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795763 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795880 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795904 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.795983 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.796194 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.796393 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.796686 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.796902 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.797767 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.798064 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.798669 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.799450 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.799635 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800267 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800585 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800655 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800712 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800746 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800775 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800805 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.800996 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801346 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801380 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801410 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801641 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801794 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801772 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.801819 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802008 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802174 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802182 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802450 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802511 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802634 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802656 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802722 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.802813 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.803751 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804183 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804256 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804286 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804313 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804338 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804369 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804394 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804418 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804444 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804453 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804472 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804502 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804529 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804555 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804583 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804613 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804636 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804665 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804689 4788 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.804714 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.805024 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.805107 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.805195 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.805834 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806017 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806076 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806105 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806125 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806172 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806201 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806298 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806325 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806369 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806397 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: 
\"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806421 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806462 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806483 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806532 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806502 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806631 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806720 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806806 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806871 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.806910 4788 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.808207 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.808499 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.808533 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.808879 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.811487 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.811837 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.812077 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.812364 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.812551 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.813772 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.814270 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.814493 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.814485 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815430 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815494 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815596 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815705 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815805 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.815911 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.816102 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.816215 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-hosts-file\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.816127 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.816839 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817094 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817107 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817123 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817462 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817729 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.817937 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.818193 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.818459 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.818520 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.818596 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.818844 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.822739 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-944s8\" (UniqueName: \"kubernetes.io/projected/8a0cccaa-c739-4361-a1ef-0dfac45097c7-kube-api-access-944s8\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825650 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825699 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a0cccaa-c739-4361-a1ef-0dfac45097c7-host\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825740 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825770 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825796 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825821 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825849 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5xlx\" (UniqueName: \"kubernetes.io/projected/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-kube-api-access-s5xlx\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825874 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825907 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825941 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825972 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826004 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826031 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a0cccaa-c739-4361-a1ef-0dfac45097c7-serviceca\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826061 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: 
\"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826104 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826269 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826301 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826316 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826330 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826344 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826357 4788 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826370 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826384 4788 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826404 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826419 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826432 4788 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826445 4788 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826457 4788 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826469 4788 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826481 4788 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826493 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826508 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826520 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826533 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826551 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826565 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826578 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826589 4788 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826599 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826610 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826622 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826635 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826647 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826661 4788 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826672 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826684 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826695 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826707 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826718 4788 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826730 4788 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826745 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: 
I1211 09:21:29.826759 4788 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826772 4788 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826784 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826799 4788 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826812 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826823 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826836 4788 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826848 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826860 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826871 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826882 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826895 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826907 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 
09:21:29.826918 4788 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826949 4788 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826961 4788 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826975 4788 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.826986 4788 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827000 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827012 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827025 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827038 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827052 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827068 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827084 4788 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827100 4788 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 
09:21:29.827113 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827126 4788 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827139 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827151 4788 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827162 4788 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827174 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827186 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827197 4788 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827207 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827219 4788 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827254 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827265 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827275 4788 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827285 4788 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827296 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827307 4788 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827317 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827328 4788 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827343 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827354 4788 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827367 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827378 4788 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827389 4788 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827400 4788 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827412 4788 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827424 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827437 4788 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827448 4788 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827462 4788 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827474 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827485 4788 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827495 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827506 4788 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827517 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827530 4788 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827542 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827554 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827565 4788 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827578 4788 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827605 4788 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827618 4788 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827630 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827646 4788 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827658 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827670 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827682 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827694 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827705 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827717 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827728 4788 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827743 4788 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827756 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827767 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827779 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827791 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827802 4788 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827813 4788 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827826 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827841 4788 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827917 4788 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827958 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827972 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827983 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827996 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828012 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828024 4788 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828036 4788 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828048 4788 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828060 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828073 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828085 4788 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828097 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828110 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828124 4788 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828137 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828150 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828162 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828173 4788 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828184 4788 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828195 4788 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828206 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.819564 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.819690 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.819975 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.820538 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.820678 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.820905 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.821738 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.821830 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.822010 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.822414 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.822696 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.823182 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.823360 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.823943 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825292 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.825556 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.827785 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828874 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.828950 4788 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.829022 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:30.329002503 +0000 UTC m=+20.399782299 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.829068 4788 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.829816 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.830061 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:30.330049329 +0000 UTC m=+20.400829125 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.828800 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.831965 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832116 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832139 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832152 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832168 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832182 4788 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832196 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832209 4788 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832221 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832261 4788 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832275 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832289 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832303 4788 reconciler_common.go:293] 
"Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832315 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.832328 4788 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.835385 4788 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.836403 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.839316 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.840121 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.840500 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.841455 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.841636 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.842939 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.849898 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.850688 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.853914 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.854274 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.854710 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.856419 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.859121 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.861377 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.862647 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.862694 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.862717 4788 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.862806 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:30.362778817 +0000 UTC m=+20.433558403 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.866714 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.866736 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.866751 4788 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:29 crc kubenswrapper[4788]: E1211 09:21:29.866816 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:30.366793937 +0000 UTC m=+20.437573513 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.867405 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.867792 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.869731 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.872910 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.873352 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.878367 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.889424 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.898530 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.920182 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.922249 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.922591 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933500 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a0cccaa-c739-4361-a1ef-0dfac45097c7-host\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933647 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933669 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5xlx\" (UniqueName: \"kubernetes.io/projected/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-kube-api-access-s5xlx\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933689 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933713 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a0cccaa-c739-4361-a1ef-0dfac45097c7-serviceca\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933739 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" 
(UniqueName: \"kubernetes.io/host-path/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-hosts-file\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933753 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-944s8\" (UniqueName: \"kubernetes.io/projected/8a0cccaa-c739-4361-a1ef-0dfac45097c7-kube-api-access-944s8\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933810 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933822 4788 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933833 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933844 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933856 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933865 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933875 4788 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933884 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933894 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933903 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933911 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933920 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933929 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933938 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933946 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933958 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933968 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933979 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933988 4788 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933998 4788 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934007 4788 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934016 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934026 4788 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934035 4788 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node 
\"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934044 4788 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934054 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934062 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934070 4788 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934080 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934090 4788 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934100 4788 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934109 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934123 4788 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934133 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934144 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934154 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934165 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.933589 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a0cccaa-c739-4361-a1ef-0dfac45097c7-host\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934546 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.934809 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.935929 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a0cccaa-c739-4361-a1ef-0dfac45097c7-serviceca\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.936089 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-hosts-file\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.938206 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.957382 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-944s8\" (UniqueName: \"kubernetes.io/projected/8a0cccaa-c739-4361-a1ef-0dfac45097c7-kube-api-access-944s8\") pod \"node-ca-9wzpd\" (UID: \"8a0cccaa-c739-4361-a1ef-0dfac45097c7\") " pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.958179 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.958604 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5xlx\" (UniqueName: \"kubernetes.io/projected/d9049f11-e2f9-4ef4-a2b9-a783604a15d2-kube-api-access-s5xlx\") pod \"node-resolver-l5m4p\" (UID: \"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\") " pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.970178 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:29 crc kubenswrapper[4788]: I1211 09:21:29.994112 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.016272 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.033883 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.046730 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.061979 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.062063 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resource
s\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.068407 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.072661 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.077877 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.088459 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: W1211 09:21:30.105646 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-2f212f48d87f17f762f9b314f3079c7cf69d3ace634450e9a822487b54d1af15 WatchSource:0}: Error finding container 2f212f48d87f17f762f9b314f3079c7cf69d3ace634450e9a822487b54d1af15: Status 404 returned error can't find the container with id 2f212f48d87f17f762f9b314f3079c7cf69d3ace634450e9a822487b54d1af15 Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.115863 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-l5m4p" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.139254 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-9wzpd" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.177741 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-b5z5h"] Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.179033 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.182967 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.183738 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.184087 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.184245 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.185553 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.204739 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.216040 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.228040 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.238767 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: W1211 09:21:30.251424 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a0cccaa_c739_4361_a1ef_0dfac45097c7.slice/crio-c6b92f995bbd57c478d55110c0c5ff386005d539836d803ef689d5107bedbc9a WatchSource:0}: Error finding container c6b92f995bbd57c478d55110c0c5ff386005d539836d803ef689d5107bedbc9a: Status 404 returned error can't find the container with id c6b92f995bbd57c478d55110c0c5ff386005d539836d803ef689d5107bedbc9a Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.254856 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.329752 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6eaad5db-9692-4fdb-982f-22fd2703e0b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-b5z5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.339807 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.339961 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.340004 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eaad5db-9692-4fdb-982f-22fd2703e0b0-mcd-auth-proxy-config\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.340033 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/6eaad5db-9692-4fdb-982f-22fd2703e0b0-proxy-tls\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.340080 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.340125 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6eaad5db-9692-4fdb-982f-22fd2703e0b0-rootfs\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.340146 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8xd5\" (UniqueName: \"kubernetes.io/projected/6eaad5db-9692-4fdb-982f-22fd2703e0b0-kube-api-access-w8xd5\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.340324 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:31.340295679 +0000 UTC m=+21.411075265 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.340422 4788 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.340479 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:31.340470043 +0000 UTC m=+21.411249629 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.341021 4788 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.341069 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:31.341057648 +0000 UTC m=+21.411837234 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.368270 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.412590 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.438205 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442778 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6eaad5db-9692-4fdb-982f-22fd2703e0b0-proxy-tls\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442822 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442856 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442880 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6eaad5db-9692-4fdb-982f-22fd2703e0b0-rootfs\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442915 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8xd5\" (UniqueName: \"kubernetes.io/projected/6eaad5db-9692-4fdb-982f-22fd2703e0b0-kube-api-access-w8xd5\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.442951 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eaad5db-9692-4fdb-982f-22fd2703e0b0-mcd-auth-proxy-config\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 
crc kubenswrapper[4788]: I1211 09:21:30.443599 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6eaad5db-9692-4fdb-982f-22fd2703e0b0-mcd-auth-proxy-config\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444380 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444415 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444432 4788 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444497 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:31.444474332 +0000 UTC m=+21.515253918 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444561 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444623 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444639 4788 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:30 crc kubenswrapper[4788]: E1211 09:21:30.444688 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:31.444673897 +0000 UTC m=+21.515453483 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.444724 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6eaad5db-9692-4fdb-982f-22fd2703e0b0-rootfs\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.447853 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6eaad5db-9692-4fdb-982f-22fd2703e0b0-proxy-tls\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.462024 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.483573 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.486076 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8xd5\" (UniqueName: \"kubernetes.io/projected/6eaad5db-9692-4fdb-982f-22fd2703e0b0-kube-api-access-w8xd5\") pod \"machine-config-daemon-b5z5h\" (UID: \"6eaad5db-9692-4fdb-982f-22fd2703e0b0\") " pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.501684 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.501821 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.502550 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.504105 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.504762 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.505818 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.506418 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.507061 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.508106 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.508952 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.510963 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.511763 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.512026 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.513980 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.514647 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.526851 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.527566 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.529215 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.530246 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" 
path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.536552 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.537502 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.538784 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.539476 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.540099 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.541180 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.541899 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.542051 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.543315 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.543981 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.545118 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.545653 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.546785 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.547346 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 11 09:21:30 crc 
kubenswrapper[4788]: I1211 09:21:30.547921 4788 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.548036 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.551652 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.552659 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.554025 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.564057 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6eaad5db-9692-4fdb-982f-22fd2703e0b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-b5z5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.568592 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.569793 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.572124 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.574839 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.576166 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.576832 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.578129 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.579488 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.580427 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.584483 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.585295 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.585995 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.586438 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.587504 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.588574 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.589163 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.593765 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.595002 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.595836 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.598135 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.612763 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.628448 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gtkxj"] Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.629171 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.630663 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.631160 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvxc"] Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.632124 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2hwsx"] Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.632276 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.633757 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.635840 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.636064 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637327 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637389 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637455 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637572 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637631 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637709 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637779 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637795 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.637943 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.638105 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.638585 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.645115 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.650208 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.654129 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9wzpd" event={"ID":"8a0cccaa-c739-4361-a1ef-0dfac45097c7","Type":"ContainerStarted","Data":"c6b92f995bbd57c478d55110c0c5ff386005d539836d803ef689d5107bedbc9a"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.659272 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"683854b24c7d4a64aa60f3e5791c535401d7a49cb4148cf643812e6bfa8a3d35"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.659344 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2f212f48d87f17f762f9b314f3079c7cf69d3ace634450e9a822487b54d1af15"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.665659 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.667482 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.670593 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.670971 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.671976 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"13fff03967ad24d17eb28a56e8111a2bdac33c15685f105983020f259da77526"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.675592 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"13745c02efee94e7962f18db2616ab52f24db46abf08118a1523d82b9dd8118d"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.675658 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4baaa0fb1239fe648e7bac011ba2d5588262433d915fc8313a4da40fdc0236d6"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.678406 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-l5m4p" 
event={"ID":"d9049f11-e2f9-4ef4-a2b9-a783604a15d2","Type":"ContainerStarted","Data":"68569f2228ff8557815a596034f689265211e4d2e7299e5562c515e22f00c3b3"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.678950 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.680359 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9717c84af72883a52395ccc5c491a988faf250807c0e19170ff458e6b0afa42e"} Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.690821 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.703640 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.714908 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.725541 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.739609 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.748814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.748919 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.748965 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749028 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-kubelet\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749066 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-multus-certs\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749173 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749218 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-cnibin\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: 
\"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749299 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749338 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749438 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749484 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749522 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749558 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-socket-dir-parent\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749589 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-netns\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749628 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-conf-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749669 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n94k\" 
(UniqueName: \"kubernetes.io/projected/a3cea019-16b2-4a01-a945-cd2b37745330-kube-api-access-8n94k\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749704 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749732 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749767 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-os-release\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749805 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749839 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rncsg\" (UniqueName: \"kubernetes.io/projected/f42b230e-2bb1-4711-894b-281454b998de-kube-api-access-rncsg\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749871 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-system-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749903 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.749980 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlknk\" (UniqueName: \"kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750014 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-cnibin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750067 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-os-release\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750105 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-cni-binary-copy\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750144 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-multus\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750200 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750250 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750286 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750324 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750360 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-etc-kubernetes\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc 
kubenswrapper[4788]: I1211 09:21:30.750411 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-system-cni-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750448 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750516 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-bin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750550 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750593 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-multus-daemon-config\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750627 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750662 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750700 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-hostroot\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750730 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.750758 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.751180 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-k8s-cni-cncf-io\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.760599 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.773599 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.788446 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.805138 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6eaad5db-9692-4fdb-982f-22fd2703e0b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-b5z5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.828251 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gtkxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3cea019-16b2-4a01-a945-cd2b37745330\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n94k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gtkxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.847612 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3541a671-d810-482f-bf54-71e8f344b788\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-twvxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.851903 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-etc-kubernetes\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.851976 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852086 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-system-cni-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852120 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-bin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852146 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852162 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-system-cni-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852200 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852244 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852278 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" 
(UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-multus-daemon-config\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852294 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852300 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852359 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852365 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852420 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-k8s-cni-cncf-io\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852446 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-hostroot\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852473 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852501 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852530 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852548 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852570 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-multus-certs\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852588 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852605 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-cnibin\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852638 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-kubelet\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852688 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852705 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852735 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-socket-dir-parent\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " 
pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852767 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-netns\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852799 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-conf-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852825 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852822 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-binary-copy\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852853 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852913 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852946 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852955 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-bin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852980 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-os-release\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853019 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n94k\" (UniqueName: \"kubernetes.io/projected/a3cea019-16b2-4a01-a945-cd2b37745330-kube-api-access-8n94k\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853046 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-system-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853071 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853114 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853139 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rncsg\" (UniqueName: \"kubernetes.io/projected/f42b230e-2bb1-4711-894b-281454b998de-kube-api-access-rncsg\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853187 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlknk\" (UniqueName: \"kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853216 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-cni-binary-copy\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853265 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-multus\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853301 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" 
(UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-cnibin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853320 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-os-release\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853345 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853369 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853394 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853603 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-cnibin\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853658 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-k8s-cni-cncf-io\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852971 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853705 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853747 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-multus-certs\") pod \"multus-gtkxj\" (UID: 
\"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853785 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853800 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853828 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-cnibin\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853817 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853839 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853916 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-hostroot\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853950 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.853953 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854003 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-os-release\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.852100 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-etc-kubernetes\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854037 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-os-release\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854078 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854097 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854128 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-system-cni-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854195 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-cni-multus\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854145 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854274 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-var-lib-kubelet\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854210 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-host-run-netns\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854266 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 
09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854171 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854167 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-socket-dir-parent\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854329 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a3cea019-16b2-4a01-a945-cd2b37745330-multus-conf-dir\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854355 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854389 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f42b230e-2bb1-4711-894b-281454b998de-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.854846 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-cni-binary-copy\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.855064 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a3cea019-16b2-4a01-a945-cd2b37745330-multus-daemon-config\") pod \"multus-gtkxj\" (UID: \"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.855306 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.856493 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" 
(UniqueName: \"kubernetes.io/host-path/f42b230e-2bb1-4711-894b-281454b998de-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.866182 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.882438 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13745c02efee94e7962f18db2616ab52f24db46abf08118a1523d82b9dd8118d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.883413 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlknk\" (UniqueName: \"kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk\") pod \"ovnkube-node-twvxc\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.883842 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n94k\" (UniqueName: \"kubernetes.io/projected/a3cea019-16b2-4a01-a945-cd2b37745330-kube-api-access-8n94k\") pod \"multus-gtkxj\" (UID: 
\"a3cea019-16b2-4a01-a945-cd2b37745330\") " pod="openshift-multus/multus-gtkxj" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.886942 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rncsg\" (UniqueName: \"kubernetes.io/projected/f42b230e-2bb1-4711-894b-281454b998de-kube-api-access-rncsg\") pod \"multus-additional-cni-plugins-2hwsx\" (UID: \"f42b230e-2bb1-4711-894b-281454b998de\") " pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.908927 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.930582 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.973602 4788 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b230e-2bb1-4711-894b-281454b998de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\
\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2hwsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:30 crc kubenswrapper[4788]: I1211 09:21:30.990263 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.119594 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gtkxj" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.132908 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.139622 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" Dec 11 09:21:31 crc kubenswrapper[4788]: W1211 09:21:31.166433 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3541a671_d810_482f_bf54_71e8f344b788.slice/crio-63fbb2ae13e61224c0d33da7c84d5f83f442e3e4288944ef566f9070b8284f88 WatchSource:0}: Error finding container 63fbb2ae13e61224c0d33da7c84d5f83f442e3e4288944ef566f9070b8284f88: Status 404 returned error can't find the container with id 63fbb2ae13e61224c0d33da7c84d5f83f442e3e4288944ef566f9070b8284f88 Dec 11 09:21:31 crc kubenswrapper[4788]: W1211 09:21:31.167176 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf42b230e_2bb1_4711_894b_281454b998de.slice/crio-e2b7217ed17f4f1b1086d0c555a5e653e72613216ec16b1566c6ee634a3edc95 WatchSource:0}: Error finding container e2b7217ed17f4f1b1086d0c555a5e653e72613216ec16b1566c6ee634a3edc95: Status 404 returned error can't find the container with id e2b7217ed17f4f1b1086d0c555a5e653e72613216ec16b1566c6ee634a3edc95 Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.358588 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.358879 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.358910 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:33.35887036 +0000 UTC m=+23.429649946 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.358996 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.359033 4788 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.359109 4788 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.359113 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:33.359093245 +0000 UTC m=+23.429872831 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.359176 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:33.359165417 +0000 UTC m=+23.429945003 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.460187 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.460321 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460397 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460434 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460448 4788 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460499 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460517 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:33.460495349 +0000 UTC m=+23.531274935 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460522 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460542 4788 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.460608 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:33.460588212 +0000 UTC m=+23.531367788 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.497422 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.497583 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.497716 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.497894 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.497971 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.498299 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.680419 4788 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.687474 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.687524 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.687535 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.687734 4788 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.698694 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a740254f68676e0d7c0cb460b03666a72534f0ac2c35c4742b638dc24e397930"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.700877 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40" exitCode=0 Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.700939 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.700963 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerStarted","Data":"e2b7217ed17f4f1b1086d0c555a5e653e72613216ec16b1566c6ee634a3edc95"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.704030 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" exitCode=0 Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.704174 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.704216 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"63fbb2ae13e61224c0d33da7c84d5f83f442e3e4288944ef566f9070b8284f88"} Dec 11 09:21:31 
crc kubenswrapper[4788]: I1211 09:21:31.706262 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-l5m4p" event={"ID":"d9049f11-e2f9-4ef4-a2b9-a783604a15d2","Type":"ContainerStarted","Data":"f4e48c30c48bb3480b8db75d2d67dbc8c20fc75a453a06b90252de52d56f4267"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.714684 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gtkxj" event={"ID":"a3cea019-16b2-4a01-a945-cd2b37745330","Type":"ContainerStarted","Data":"fc22f8b9744bc5587881d46a768d97e9eef1660f056492bc4b117e2eb7c5040c"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.714886 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gtkxj" event={"ID":"a3cea019-16b2-4a01-a945-cd2b37745330","Type":"ContainerStarted","Data":"d275a422dd2b3187b8ed858bdd41ef108b541b81fd27f8ec15dc017176532f7d"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.717110 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"306cd79e54a2857d01eb61d4e5c2576a4625e4eadab176013b50b986d0d17aa8"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.717200 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.718027 4788 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.718421 4788 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.718805 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9wzpd" event={"ID":"8a0cccaa-c739-4361-a1ef-0dfac45097c7","Type":"ContainerStarted","Data":"bcf604ff46486ef1dea9a9531b5f733b1319f87b4023f88941fbb607ef7c9008"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.719595 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.719625 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.719638 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.719655 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.719668 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:31Z","lastTransitionTime":"2025-12-11T09:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.740886 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.766927 4788 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"47dfd2f0-3ae8-470e-8291-4c306acc36e8\\\",\\\"systemUUID\\\":\\\"14f7cb2d-755c-4224-b9c8-56700be1f839\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.777818 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.778083 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.778142 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.778200 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.778275 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:31Z","lastTransitionTime":"2025-12-11T09:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.791274 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.846953 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b230e-2bb1-4711-894b-281454b998de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2hwsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.870110 4788 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"47dfd2f0-3ae8-470e-8291-4c306acc36e8\\\",\\\"systemUUID\\\":\\\"14f7cb2d-755c-4224-b9c8-56700be1f839\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.881317 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.881355 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.881367 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.881385 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.881398 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:31Z","lastTransitionTime":"2025-12-11T09:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.904259 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.916492 4788 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"47dfd2f0-3ae8-470e-8291-4c306acc36e8\\\",\\\"systemUUID\\\":\\\"1
4f7cb2d-755c-4224-b9c8-56700be1f839\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925501 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925668 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925715 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925726 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925746 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.925759 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:31Z","lastTransitionTime":"2025-12-11T09:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.954113 4788 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"47dfd2f0-3ae8-470e-8291-4c306acc36e8\\\",\\\"systemUUID\\\":\\\"14f7cb2d-755c-4224-b9c8-56700be1f839\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.956638 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.969299 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.969823 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.969867 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.969893 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.969906 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:31Z","lastTransitionTime":"2025-12-11T09:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.984514 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.997438 4788 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"47dfd2f0-3ae8-470e-8291-4c306acc36e8\\\",\\\"systemUUID\\\":\\\"14f7cb2d-755c-4224-b9c8-56700be1f839\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:31 crc kubenswrapper[4788]: E1211 09:21:31.997618 4788 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 09:21:31 crc kubenswrapper[4788]: I1211 09:21:31.997616 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:31Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.000464 4788 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.000516 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.000532 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.000554 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.000571 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.019501 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3541a671-d810-482f-bf54-71e8f344b788\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.036812 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13745c02efee94e7962f18db2616ab52f24db46abf08118a1523d82b9dd8118d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.053241 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.070777 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a740254f68676e0d7c0cb460b03666a72534f0ac2c35c4742b638dc24e397930\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://683854b24c7d4a64aa60f3e5791c535401d7a49cb4148cf643812e6bfa8a3d35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.087923 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6eaad5db-9692-4fdb-982f-22fd2703e0b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-b5z5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.103337 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.103381 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.103392 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.103412 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.103422 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.113860 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gtkxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3cea019-16b2-4a01-a945-cd2b37745330\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n94k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gtkxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.135207 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gtkxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3cea019-16b2-4a01-a945-cd2b37745330\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc22f8b9744bc5587881d46a768d97e9eef1660f056492bc4b117e2eb7c5040c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\
"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8n94k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gtkxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.159692 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3541a671-d810-482f-bf54-71e8f344b788\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wlknk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-twvxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z 
is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.180761 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13745c02efee94e7962f18db2616ab52f24db46abf08118a1523d82b9dd8118d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.208308 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.208357 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.208367 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.208383 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.208393 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.213785 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.232981 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a740254f68676e0d7c0cb460b03666a72534f0ac2c35c4742b638dc24e397930\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://683854b24c7d4a64aa60f3e5791c535401d7a49cb4148cf643812e6bfa8a3d35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.252422 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6eaad5db-9692-4fdb-982f-22fd2703e0b0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://306cd79e54a2857d01eb61d4e5c2576a4625e4eadab176013b50b986d0d17aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w8xd5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-b5z5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.279584 4788 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.295996 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcf604ff46486ef1dea9a9531b5f733b1319f87b4023f88941fbb607ef7c9008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.313772 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.313813 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.313825 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.313844 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.313856 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.328250 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b230e-2bb1-4711-894b-281454b998de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2hwsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.347188 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.365352 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.384783 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.408565 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.417778 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.418005 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.418131 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.418280 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.418386 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.430027 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4e48c30c48bb3480b8db75d2d67dbc8c20fc75a453a06b90252de52d56f4267\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.522488 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.522538 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.522549 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.522569 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.522583 4788 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.626302 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.626714 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.626727 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.626747 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.626758 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.726374 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerStarted","Data":"c43d215827a212087871949914c96bfbd47ef2a19d880aa04101fe8e14b085eb"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.729482 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.729533 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.729547 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.729570 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.729588 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.732562 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734201 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" exitCode=1 Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734297 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734389 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734405 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734416 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.734433 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.747122 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36b7ed51-7958-44ed-94a5-17d3f2d6c3a2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 09:21:23.104837 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 09:21:23.131349 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-749709705/tls.crt::/tmp/serving-cert-749709705/tls.key\\\\\\\"\\\\nI1211 09:21:28.486991 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 09:21:28.492493 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 09:21:28.492552 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 09:21:28.492586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 09:21:28.492596 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 09:21:28.511780 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1211 09:21:28.511811 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 09:21:28.511821 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 09:21:28.511825 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 09:21:28.511828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 09:21:28.511830 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 09:21:28.511856 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1211 09:21:28.515159 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.766465 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9wzpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0cccaa-c739-4361-a1ef-0dfac45097c7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcf604ff46486ef1dea9a9531b5f733b1319f87b4023f88941fbb607ef7c9008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-944s8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-9wzpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.800671 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b230e-2bb1-4711-894b-281454b998de\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33d1a006e4579469d017ed68b1cc316432f8361e83abb286843ff187890d6a40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T09:21:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c43d215827a212087871949914c96bfbd47ef2a19d880aa04101fe8e14b085eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rncsg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2hwsx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.816047 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"457d8c45-aadc-4a18-b9a0-806b375e3ea3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bccbdbe825fd0f3e0438bffa62aadbdfcd8b1ca4868ff1d9cd2bede2ec7a87dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1bcee8a3d5a255585372cbdeb4d9d6c8d4b57688f307d
210447e1bcfceb8c5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe05c9ee71547ad72b83166ce41925b34ffd1e4aef988057e5d8ad0995c2d974\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.832435 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.832482 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.832496 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.832516 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.832532 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.841801 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.861218 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.893920 4788 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-l5m4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9049f11-e2f9-4ef4-a2b9-a783604a15d2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T09:21:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4e48c30c48bb3480b8db75d2d67dbc8c20fc75a453a06b90252de52d56f4267\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T09:21:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5xlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T09:21:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-l5m4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T09:21:32Z is after 2025-08-24T17:21:41Z" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.935506 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.935547 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.935561 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.935581 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:32 crc kubenswrapper[4788]: I1211 09:21:32.935595 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:32Z","lastTransitionTime":"2025-12-11T09:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.038264 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.038328 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.038342 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.038362 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.038380 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.060812 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podStartSLOduration=4.060783257 podStartE2EDuration="4.060783257s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.060777227 +0000 UTC m=+23.131556803" watchObservedRunningTime="2025-12-11 09:21:33.060783257 +0000 UTC m=+23.131562843" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.078928 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gtkxj" podStartSLOduration=4.07890497 podStartE2EDuration="4.07890497s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.07770019 +0000 UTC m=+23.148479776" watchObservedRunningTime="2025-12-11 09:21:33.07890497 +0000 UTC m=+23.149684556" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.141063 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.141130 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.141144 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.141162 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.141174 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.244032 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.244097 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.244113 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.244140 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.244159 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.346467 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.346541 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.346554 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.346575 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.346586 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.388019 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.388264 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.388219779 +0000 UTC m=+27.458999365 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.388372 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.388470 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.388504 4788 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.388579 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.388559247 +0000 UTC m=+27.459338823 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.388609 4788 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.388651 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.38864431 +0000 UTC m=+27.459423896 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.405996 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9"] Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.406512 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.408477 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.408936 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.431896 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=5.43187319 podStartE2EDuration="5.43187319s" podCreationTimestamp="2025-12-11 09:21:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.431045869 +0000 UTC m=+23.501825455" watchObservedRunningTime="2025-12-11 09:21:33.43187319 +0000 UTC m=+23.502652776" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.437753 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-ddsq9"] Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.438430 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.438504 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsq9" podUID="d8564117-b311-48b1-810b-5c95106cf868" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.449401 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.449460 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.449474 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.449497 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.449516 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.465737 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-l5m4p" podStartSLOduration=4.465703625 podStartE2EDuration="4.465703625s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.465604463 +0000 UTC m=+23.536384049" watchObservedRunningTime="2025-12-11 09:21:33.465703625 +0000 UTC m=+23.536483211" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.489497 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.489644 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.489812 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.489875 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.489890 4788 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] 
Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.489964 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.490020 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.490039 4788 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.489977 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.489953331 +0000 UTC m=+27.560732917 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.490173 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.490143456 +0000 UTC m=+27.560923242 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.494792 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.494831 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.494897 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.494931 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.495009 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.495158 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.515064 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=4.515038628 podStartE2EDuration="4.515038628s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.51470463 +0000 UTC m=+23.585484236" watchObservedRunningTime="2025-12-11 09:21:33.515038628 +0000 UTC m=+23.585818224" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.528953 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-9wzpd" podStartSLOduration=4.528920855 podStartE2EDuration="4.528920855s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:33.528100654 +0000 UTC m=+23.598880240" watchObservedRunningTime="2025-12-11 09:21:33.528920855 +0000 UTC m=+23.599700441" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.553482 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.553541 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.553558 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.553581 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.553595 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.590865 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.590916 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4blk\" (UniqueName: \"kubernetes.io/projected/c797f8a3-6cde-4c88-b26e-e96466c56aea-kube-api-access-n4blk\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.590946 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.590968 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.591394 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlswc\" (UniqueName: \"kubernetes.io/projected/d8564117-b311-48b1-810b-5c95106cf868-kube-api-access-mlswc\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.591458 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.655982 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.656044 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.656060 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.656085 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.656096 4788 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.692790 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4blk\" (UniqueName: \"kubernetes.io/projected/c797f8a3-6cde-4c88-b26e-e96466c56aea-kube-api-access-n4blk\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.692861 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.692889 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.693000 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlswc\" (UniqueName: \"kubernetes.io/projected/d8564117-b311-48b1-810b-5c95106cf868-kube-api-access-mlswc\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.693038 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.693110 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.693272 4788 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: E1211 09:21:33.693364 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs podName:d8564117-b311-48b1-810b-5c95106cf868 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:34.193340093 +0000 UTC m=+24.264119689 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs") pod "network-metrics-daemon-ddsq9" (UID: "d8564117-b311-48b1-810b-5c95106cf868") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.694985 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.695597 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c797f8a3-6cde-4c88-b26e-e96466c56aea-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.702903 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c797f8a3-6cde-4c88-b26e-e96466c56aea-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.711437 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4blk\" (UniqueName: \"kubernetes.io/projected/c797f8a3-6cde-4c88-b26e-e96466c56aea-kube-api-access-n4blk\") pod \"ovnkube-control-plane-749d76644c-jsnn9\" (UID: \"c797f8a3-6cde-4c88-b26e-e96466c56aea\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.714269 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlswc\" (UniqueName: \"kubernetes.io/projected/d8564117-b311-48b1-810b-5c95106cf868-kube-api-access-mlswc\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.721603 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.742117 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c3fdb47d0b41f0388223f0e851d504d9d6b0a6dc0401aca29352a535143b1aa9"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.746416 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="c43d215827a212087871949914c96bfbd47ef2a19d880aa04101fe8e14b085eb" exitCode=0 Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.746556 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"c43d215827a212087871949914c96bfbd47ef2a19d880aa04101fe8e14b085eb"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.761740 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.761808 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.761823 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.761847 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.761862 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.762523 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.766400 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.869196 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.869274 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.869293 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.869317 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.869337 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.973930 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.973988 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.974001 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.974022 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:33 crc kubenswrapper[4788]: I1211 09:21:33.974034 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:33Z","lastTransitionTime":"2025-12-11T09:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.077171 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.077215 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.077243 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.077259 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.077270 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.179893 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.179937 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.179948 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.179962 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.179976 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.199831 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:34 crc kubenswrapper[4788]: E1211 09:21:34.200096 4788 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:34 crc kubenswrapper[4788]: E1211 09:21:34.200219 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs podName:d8564117-b311-48b1-810b-5c95106cf868 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:35.200189398 +0000 UTC m=+25.270969164 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs") pod "network-metrics-daemon-ddsq9" (UID: "d8564117-b311-48b1-810b-5c95106cf868") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.282947 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.282987 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.282997 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.283011 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.283020 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.385389 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.385446 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.385466 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.385498 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.385512 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.488732 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.488777 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.488790 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.488806 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.488815 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.495343 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:34 crc kubenswrapper[4788]: E1211 09:21:34.495478 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsq9" podUID="d8564117-b311-48b1-810b-5c95106cf868" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.590666 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.590711 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.590724 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.590743 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.590755 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.693574 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.693612 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.693621 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.693639 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.693672 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.772946 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="6a10d8655e403a775c5cd3cfcbd2388a39c5e8caec832bb2b21d873a21a21398" exitCode=0 Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.773056 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"6a10d8655e403a775c5cd3cfcbd2388a39c5e8caec832bb2b21d873a21a21398"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.778114 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" event={"ID":"c797f8a3-6cde-4c88-b26e-e96466c56aea","Type":"ContainerStarted","Data":"9586023effe85f574a24ef1d132f90d25c5953fd5200f110f398b929fe3f8283"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.778168 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" event={"ID":"c797f8a3-6cde-4c88-b26e-e96466c56aea","Type":"ContainerStarted","Data":"066514a8c2c10d762357ee09f15c54a548c718909d1062c4d04891a54553a558"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.778180 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" event={"ID":"c797f8a3-6cde-4c88-b26e-e96466c56aea","Type":"ContainerStarted","Data":"87cae023c878f731b091f37490b18fac823b1461cefe54e9e175760a78447e4d"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.795816 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.795875 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.795889 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.795914 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 
09:21:34.795933 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.898895 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.898948 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.898962 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.898981 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:34 crc kubenswrapper[4788]: I1211 09:21:34.898994 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:34Z","lastTransitionTime":"2025-12-11T09:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.001595 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.001642 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.001653 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.001670 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.001680 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.105084 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.105138 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.105150 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.105169 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.105179 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.207986 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.208010 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.208020 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.208033 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.208042 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.209682 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:35 crc kubenswrapper[4788]: E1211 09:21:35.209790 4788 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:35 crc kubenswrapper[4788]: E1211 09:21:35.209831 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs podName:d8564117-b311-48b1-810b-5c95106cf868 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:37.209819786 +0000 UTC m=+27.280599372 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs") pod "network-metrics-daemon-ddsq9" (UID: "d8564117-b311-48b1-810b-5c95106cf868") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.310709 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.311084 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.311096 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.311116 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.311128 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.414160 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.414222 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.414250 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.414271 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.414287 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.495318 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.495389 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:35 crc kubenswrapper[4788]: E1211 09:21:35.495450 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:35 crc kubenswrapper[4788]: E1211 09:21:35.495606 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.495314 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:35 crc kubenswrapper[4788]: E1211 09:21:35.495721 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.517200 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.517289 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.517309 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.517334 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.517352 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.620782 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.620836 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.620850 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.620872 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.620890 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.727166 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.727213 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.727222 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.727253 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.727263 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.785923 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.786822 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.786874 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.787427 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.787854 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.787934 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.788001 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.792039 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="aff3c184aa612761585b41b841bdfe5e6ea6db345bdc01519e183469dc01211e" exitCode=0 Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.792107 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"aff3c184aa612761585b41b841bdfe5e6ea6db345bdc01519e183469dc01211e"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.816808 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.821338 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.830659 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.830709 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.830720 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.830740 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.830752 4788 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.902834 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jsnn9" podStartSLOduration=5.902808982 podStartE2EDuration="5.902808982s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:35.901671594 +0000 UTC m=+25.972451210" watchObservedRunningTime="2025-12-11 09:21:35.902808982 +0000 UTC m=+25.973588568" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.934256 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.934315 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.934328 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.934349 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:35 crc kubenswrapper[4788]: I1211 09:21:35.934366 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:35Z","lastTransitionTime":"2025-12-11T09:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.036614 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.036656 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.036667 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.036681 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.036690 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.139814 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.139875 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.139891 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.139915 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.139931 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.243006 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.243050 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.243064 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.243081 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.243092 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.346004 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.346048 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.346064 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.346082 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.346096 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.449155 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.449270 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.449299 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.449330 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.449355 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.495320 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:36 crc kubenswrapper[4788]: E1211 09:21:36.495519 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsq9" podUID="d8564117-b311-48b1-810b-5c95106cf868" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.551767 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.551841 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.551862 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.551888 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.551905 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.654326 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.654394 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.654410 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.654441 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.654460 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.757072 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.757135 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.757146 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.757167 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.757179 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.798897 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerStarted","Data":"e121bbb451109874650a657da5ef45112f0af0d858afede636737731ef3443b2"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.803574 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.804841 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerStarted","Data":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.858763 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podStartSLOduration=7.858738389 podStartE2EDuration="7.858738389s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:36.855722903 +0000 UTC m=+26.926502509" watchObservedRunningTime="2025-12-11 09:21:36.858738389 +0000 UTC m=+26.929517975" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.861439 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.861476 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.861488 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.861505 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.861518 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.964991 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.965048 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.965061 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.965078 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:36 crc kubenswrapper[4788]: I1211 09:21:36.965649 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:36Z","lastTransitionTime":"2025-12-11T09:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.069700 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.069746 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.069756 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.069772 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.069783 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.177257 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.177317 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.177329 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.177350 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.177362 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.234342 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.234571 4788 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.234662 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs podName:d8564117-b311-48b1-810b-5c95106cf868 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:41.234635082 +0000 UTC m=+31.305414768 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs") pod "network-metrics-daemon-ddsq9" (UID: "d8564117-b311-48b1-810b-5c95106cf868") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.280323 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.280368 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.280380 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.280399 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.280413 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.383405 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.383493 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.383506 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.383529 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.383544 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.436479 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.436704 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.436740 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.436704451 +0000 UTC m=+35.507484037 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.436883 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.436888 4788 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.436980 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.436954647 +0000 UTC m=+35.507734403 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.437081 4788 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.437215 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.437192413 +0000 UTC m=+35.507971989 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.487300 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.487342 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.487352 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.487370 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.487381 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.495355 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.495389 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.495448 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.495579 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.496055 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.496133 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.538250 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.538320 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538450 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538479 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538492 4788 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538541 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.538527545 +0000 UTC m=+35.609307131 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538450 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538588 4788 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538601 4788 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:37 crc kubenswrapper[4788]: E1211 09:21:37.538633 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.538623038 +0000 UTC m=+35.609402634 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.589734 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.589773 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.589782 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.589795 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.589805 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.692786 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.692818 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.692830 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.692845 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.692857 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.795631 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.795665 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.795672 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.795685 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.795694 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.898750 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.898890 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.898905 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.898928 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:37 crc kubenswrapper[4788]: I1211 09:21:37.898940 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:37Z","lastTransitionTime":"2025-12-11T09:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.001530 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.001566 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.001576 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.001591 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.001601 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.103982 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.104041 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.104052 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.104071 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.104086 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.206993 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.207046 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.207056 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.207076 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.207088 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.310279 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.310330 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.310342 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.310362 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.310376 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.412883 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.412933 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.412944 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.412959 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.412970 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.421657 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ddsq9"] Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.421772 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:38 crc kubenswrapper[4788]: E1211 09:21:38.421852 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-ddsq9" podUID="d8564117-b311-48b1-810b-5c95106cf868" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.514735 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.514800 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.514808 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.514840 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.514851 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.617029 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.617063 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.617071 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.617084 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.617093 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.719358 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.719438 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.719451 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.719478 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.719494 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.815796 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="e121bbb451109874650a657da5ef45112f0af0d858afede636737731ef3443b2" exitCode=0 Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.815846 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"e121bbb451109874650a657da5ef45112f0af0d858afede636737731ef3443b2"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.821448 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.821498 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.821510 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.821529 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.821540 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.924903 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.925210 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.925220 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.925250 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:38 crc kubenswrapper[4788]: I1211 09:21:38.925261 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:38Z","lastTransitionTime":"2025-12-11T09:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.028215 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.028287 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.028300 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.028327 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.028340 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.131142 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.131271 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.131293 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.131315 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.131327 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.233838 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.233874 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.233885 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.233902 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.233914 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.337154 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.337211 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.337222 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.337265 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.337278 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.440117 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.440169 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.440183 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.440209 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.440240 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.495331 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.495423 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.495471 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:39 crc kubenswrapper[4788]: E1211 09:21:39.495514 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.495425 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:39 crc kubenswrapper[4788]: E1211 09:21:39.495597 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 09:21:39 crc kubenswrapper[4788]: E1211 09:21:39.495663 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-ddsq9" podUID="d8564117-b311-48b1-810b-5c95106cf868" Dec 11 09:21:39 crc kubenswrapper[4788]: E1211 09:21:39.495791 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.543846 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.543889 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.543899 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.543918 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.543930 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.646539 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.646599 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.646610 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.646629 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.646648 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.749708 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.749770 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.749788 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.749814 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.749833 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.823725 4788 generic.go:334] "Generic (PLEG): container finished" podID="f42b230e-2bb1-4711-894b-281454b998de" containerID="9189eb8434c37f7ef30dec9335e065b15c871aec146d85d29d3e5d834db165eb" exitCode=0 Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.823797 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerDied","Data":"9189eb8434c37f7ef30dec9335e065b15c871aec146d85d29d3e5d834db165eb"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.853471 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.853539 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.853550 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.853569 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.853580 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.956888 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.956959 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.956974 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.957047 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:39 crc kubenswrapper[4788]: I1211 09:21:39.957060 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:39Z","lastTransitionTime":"2025-12-11T09:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.059475 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.059518 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.059527 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.059545 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.059556 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.162347 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.162433 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.162446 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.162467 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.162480 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.265357 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.265411 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.265431 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.265453 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.265479 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.368743 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.368807 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.368828 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.368856 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.368873 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.472339 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.472412 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.472433 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.472658 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.472680 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.575696 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.575739 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.575749 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.575767 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.575780 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.678383 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.678711 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.678720 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.678734 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.678744 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.782479 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.782821 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.782863 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.782889 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.782906 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.833854 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" event={"ID":"f42b230e-2bb1-4711-894b-281454b998de","Type":"ContainerStarted","Data":"fabba47c79b61b02133859a78ef234a9c6e4b640f62116329b00d094522157d5"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.865167 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2hwsx" podStartSLOduration=11.865123888 podStartE2EDuration="11.865123888s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:40.864896193 +0000 UTC m=+30.935675799" watchObservedRunningTime="2025-12-11 09:21:40.865123888 +0000 UTC m=+30.935903474" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.885785 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.885823 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.885835 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.885852 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.885861 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.989085 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.989138 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.989153 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.989175 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 09:21:40 crc kubenswrapper[4788]: I1211 09:21:40.989191 4788 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T09:21:40Z","lastTransitionTime":"2025-12-11T09:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.092204 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.092322 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.092346 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.092379 4788 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.092637 4788 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.139566 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-47nzd"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.140216 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.140442 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.140866 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.141343 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.141410 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mg2kx"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.141784 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.142077 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.155297 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.155348 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.155506 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.155771 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159166 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159166 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159495 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159532 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159710 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159804 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.159934 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160015 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160093 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160104 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160176 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160214 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160331 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160351 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160453 4788 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160501 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160583 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160667 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160708 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160831 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160870 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160896 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.160967 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.161016 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.161148 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.161916 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.162325 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.162510 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201329 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201375 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-encryption-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201405 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-client\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201442 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201460 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-node-pullsecrets\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201474 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-config\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201488 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-serving-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201507 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201532 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-serving-cert\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201557 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk529\" (UniqueName: \"kubernetes.io/projected/19c4e640-d1ed-47db-83f6-4e4656b7138b-kube-api-access-zk529\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201584 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201602 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-images\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201632 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-auth-proxy-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201649 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201671 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vl64\" (UniqueName: \"kubernetes.io/projected/0783fcec-bf1b-4910-b8c2-08d85c53093a-kube-api-access-4vl64\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201693 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsrg5\" (UniqueName: \"kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201713 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-image-import-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201734 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/19c4e640-d1ed-47db-83f6-4e4656b7138b-machine-approver-tls\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201754 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201779 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0783fcec-bf1b-4910-b8c2-08d85c53093a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201797 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201816 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7gzl\" (UniqueName: \"kubernetes.io/projected/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-kube-api-access-q7gzl\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.201849 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit-dir\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.203022 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.203566 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gstnh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.203855 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.204334 4788 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.204784 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.205172 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.205558 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.205836 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.205994 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.206065 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.206140 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.206214 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.206351 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.206630 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.205839 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.207163 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209188 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209347 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xm9p5"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209614 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209754 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209914 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.209929 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.210206 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.211205 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dstfn"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.215763 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.222446 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.222863 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.223258 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.223810 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.224647 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.224994 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.221416 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.225459 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.225299 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.226049 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.226629 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.227001 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.227377 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.227713 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.227727 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.228173 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.228448 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.228608 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tlz6t"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.228780 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.229086 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.229168 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.229692 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.229878 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6xr2p"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.230197 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.232295 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.245136 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.245494 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.245656 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.245818 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.249059 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9n777"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.249692 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.250457 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v858r"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.251283 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.271911 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.272923 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.307685 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.307885 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.308057 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.308517 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.308665 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.308843 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309160 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309271 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309549 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309664 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309781 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.309957 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.310071 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.310168 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 09:21:41 crc 
kubenswrapper[4788]: I1211 09:21:41.310353 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.310497 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.311255 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.311396 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.312620 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.312862 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.312895 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.312906 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313023 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313158 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313461 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313522 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313548 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313552 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-images\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313591 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313612 4788 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-auth-proxy-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313631 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313650 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vl64\" (UniqueName: \"kubernetes.io/projected/0783fcec-bf1b-4910-b8c2-08d85c53093a-kube-api-access-4vl64\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313667 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsrg5\" (UniqueName: \"kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313682 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-image-import-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313698 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/19c4e640-d1ed-47db-83f6-4e4656b7138b-machine-approver-tls\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313715 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313729 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313747 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config\") pod 
\"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313766 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0783fcec-bf1b-4910-b8c2-08d85c53093a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313785 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313801 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7gzl\" (UniqueName: \"kubernetes.io/projected/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-kube-api-access-q7gzl\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313817 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313840 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit-dir\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313856 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313871 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313887 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313901 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-encryption-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313925 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-client\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313942 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313957 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313973 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-node-pullsecrets\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.313988 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-config\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314005 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-serving-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314022 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314037 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw28p\" (UniqueName: \"kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 
09:21:41.314059 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-serving-cert\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314073 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk529\" (UniqueName: \"kubernetes.io/projected/19c4e640-d1ed-47db-83f6-4e4656b7138b-kube-api-access-zk529\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314623 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314792 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314857 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314905 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-audit-dir\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: E1211 09:21:41.314960 4788 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.314975 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 09:21:41 crc kubenswrapper[4788]: E1211 09:21:41.314995 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs podName:d8564117-b311-48b1-810b-5c95106cf868 nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.314983799 +0000 UTC m=+39.385763385 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs") pod "network-metrics-daemon-ddsq9" (UID: "d8564117-b311-48b1-810b-5c95106cf868") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.315108 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.315870 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-auth-proxy-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.316463 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-image-import-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.316872 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317025 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c4e640-d1ed-47db-83f6-4e4656b7138b-config\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317329 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-images\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317348 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317524 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317533 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317563 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317652 4788 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-authentication"/"audit" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317387 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317426 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.317476 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.318998 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-serving-ca\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.323028 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324043 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324159 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324386 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324608 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324790 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324961 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.331823 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.332375 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-node-pullsecrets\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.332437 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.332639 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.332931 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.324043 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.333172 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/19c4e640-d1ed-47db-83f6-4e4656b7138b-machine-approver-tls\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.333326 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/0783fcec-bf1b-4910-b8c2-08d85c53093a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.333336 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.333441 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.334683 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-trusted-ca-bundle\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.334771 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.335445 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-encryption-config\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.337119 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.350532 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-etcd-client\") pod 
\"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.353727 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0783fcec-bf1b-4910-b8c2-08d85c53093a-config\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.354568 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-47nzd"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.380459 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c7fld"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.381802 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.382552 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.354688 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.357887 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-serving-cert\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.335660 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.336809 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.340288 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.340412 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.340486 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.390673 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.395114 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.395299 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.396554 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.396768 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.396960 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.397018 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.402438 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.403581 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.404053 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.404080 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.404055 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.404317 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.404723 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.405130 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.406870 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.407420 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.407623 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.409006 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.410144 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.410445 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.413023 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414268 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fndxq"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414757 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw28p\" (UniqueName: \"kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414824 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414870 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414901 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414930 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle\") pod \"console-f9d7485db-jdvlj\" (UID: 
\"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.414969 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.415006 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.415134 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.416193 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.416533 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.416601 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gstnh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.417616 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.418053 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mg2kx"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.418993 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-c9cxs"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.419153 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.419864 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.420223 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.420953 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.421621 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.420986 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.423500 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.424982 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.426142 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.426875 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.427053 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.427630 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.430723 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.430782 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.430796 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.434067 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xm9p5"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.435274 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2s9b9"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.449749 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.454020 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-snc68"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.454524 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-khc6m"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.454912 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.455187 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.455377 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.455892 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.456458 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.458949 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.459374 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.460425 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9n777"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.462138 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tlz6t"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.463404 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.463908 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.464992 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c7fld"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.466034 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.466862 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dstfn"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.467725 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fndxq"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.468600 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.469452 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.470287 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.471162 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.471983 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7kz7q"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.474118 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.474158 4788 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.474280 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.474534 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-khc6m"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.475508 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.476694 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v858r"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.477611 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.479556 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.479596 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.480450 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7kz7q"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.481191 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.482294 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.483114 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2s9b9"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.483441 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.484116 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.485046 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt"] Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.489079 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.495140 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.495193 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.495333 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.495479 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.503725 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.522984 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.543994 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.563928 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.584115 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.603554 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.623870 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.643572 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.663963 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.684091 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.704182 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.724956 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.743758 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.763825 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.783620 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.804148 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: 
I1211 09:21:41.823977 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.844640 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.863951 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.883830 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.904478 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.924385 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.945377 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.963963 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 09:21:41 crc kubenswrapper[4788]: I1211 09:21:41.985108 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.004047 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.023861 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.044507 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.064175 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.084268 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.105132 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.124018 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.144509 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.163894 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.220368 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zk529\" (UniqueName: \"kubernetes.io/projected/19c4e640-d1ed-47db-83f6-4e4656b7138b-kube-api-access-zk529\") pod \"machine-approver-56656f9798-snxvv\" (UID: \"19c4e640-d1ed-47db-83f6-4e4656b7138b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.244307 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vl64\" (UniqueName: \"kubernetes.io/projected/0783fcec-bf1b-4910-b8c2-08d85c53093a-kube-api-access-4vl64\") pod \"machine-api-operator-5694c8668f-47nzd\" (UID: \"0783fcec-bf1b-4910-b8c2-08d85c53093a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.259120 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsrg5\" (UniqueName: \"kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5\") pod \"route-controller-manager-6576b87f9c-7qxsh\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.282656 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7gzl\" (UniqueName: \"kubernetes.io/projected/93f3b80a-e2ab-4f3c-87e8-806e5110cf9a-kube-api-access-q7gzl\") pod \"apiserver-76f77b778f-mg2kx\" (UID: \"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a\") " pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.304793 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.324521 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.342100 4788 request.go:700] Waited for 1.00933174s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-metrics&limit=500&resourceVersion=0 Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.344611 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.369702 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.375301 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.384478 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: W1211 09:21:42.389829 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19c4e640_d1ed_47db_83f6_4e4656b7138b.slice/crio-5606073744d789a6ee9fb5538c518db77394fc0fb127e15d4b320fb3324f2734 WatchSource:0}: Error finding container 5606073744d789a6ee9fb5538c518db77394fc0fb127e15d4b320fb3324f2734: Status 404 returned error can't find the container with id 5606073744d789a6ee9fb5538c518db77394fc0fb127e15d4b320fb3324f2734 Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.392773 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.404117 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.407275 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.425973 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.429794 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.445264 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.463834 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.484808 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.504402 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.523360 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.544336 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.563829 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.584785 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.604190 4788 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.623852 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.643911 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.664804 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.684779 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.704078 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.724693 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.744658 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.778079 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw28p\" (UniqueName: \"kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p\") pod \"console-f9d7485db-jdvlj\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.784388 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.804089 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.824139 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.841377 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" event={"ID":"19c4e640-d1ed-47db-83f6-4e4656b7138b","Type":"ContainerStarted","Data":"5606073744d789a6ee9fb5538c518db77394fc0fb127e15d4b320fb3324f2734"} Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.844757 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.863908 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.883965 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.904458 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 09:21:42 crc 
kubenswrapper[4788]: I1211 09:21:42.924195 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.945185 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.964078 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.981960 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:42 crc kubenswrapper[4788]: I1211 09:21:42.985068 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.004413 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.024106 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.043963 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.063935 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.084880 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.103629 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.124324 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.143325 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.163369 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.182737 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.204270 4788 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.230055 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.243470 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 09:21:43 
crc kubenswrapper[4788]: I1211 09:21:43.263394 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.283563 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.304834 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.323879 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.342598 4788 request.go:700] Waited for 1.867968776s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.344867 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.369272 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.385206 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.404437 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.424896 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.444563 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.463978 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.483690 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.504650 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.728080 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996066 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-trusted-ca\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996211 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtg9d\" (UniqueName: \"kubernetes.io/projected/90a71b66-dd01-4895-8b06-98a8f650cabc-kube-api-access-xtg9d\") 
pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996292 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6378ba-bda0-4e50-a126-29c23361a240-serving-cert\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996344 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996575 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-metrics-certs\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:43 crc kubenswrapper[4788]: E1211 09:21:43.996861 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.496835312 +0000 UTC m=+34.567614978 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996928 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.996985 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-config\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997048 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997198 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrnsx\" (UniqueName: \"kubernetes.io/projected/0f6378ba-bda0-4e50-a126-29c23361a240-kube-api-access-mrnsx\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997541 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-default-certificate\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997643 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997744 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-stats-auth\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997788 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a71b66-dd01-4895-8b06-98a8f650cabc-service-ca-bundle\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997850 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997879 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997895 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4gcs\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:43 crc kubenswrapper[4788]: I1211 09:21:43.997913 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.098901 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099125 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099159 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcpqk\" (UniqueName: \"kubernetes.io/projected/b39d101c-fbdd-427c-9369-cbfde9bb50cd-kube-api-access-bcpqk\") pod \"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099180 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-proxy-tls\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099202 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099281 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.099291 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.59922314 +0000 UTC m=+34.670002726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099343 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099763 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4fb89c3c-e4d1-4365-bff9-d2d010030605-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.099883 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100007 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtg9d\" (UniqueName: \"kubernetes.io/projected/90a71b66-dd01-4895-8b06-98a8f650cabc-kube-api-access-xtg9d\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100047 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c12131a2-d938-4bab-a3f2-b82205ea5a1b-serving-cert\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100078 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgf6w\" (UniqueName: \"kubernetes.io/projected/c12131a2-d938-4bab-a3f2-b82205ea5a1b-kube-api-access-mgf6w\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100191 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100422 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100470 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c12131a2-d938-4bab-a3f2-b82205ea5a1b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100499 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d393854b-5612-4686-8a9f-c6820b3eee5f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100525 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc 
kubenswrapper[4788]: I1211 09:21:44.100555 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100584 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w77px\" (UniqueName: \"kubernetes.io/projected/c0469afa-8902-418c-8545-f5ad9ed2b86b-kube-api-access-w77px\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100706 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100769 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100804 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6806d62e-080e-4c44-a25e-abfd4baa858e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" (UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100834 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-metrics-tls\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100874 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.100904 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-config\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 
crc kubenswrapper[4788]: I1211 09:21:44.100934 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101022 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm7cw\" (UniqueName: \"kubernetes.io/projected/6806d62e-080e-4c44-a25e-abfd4baa858e-kube-api-access-cm7cw\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" (UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101167 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101212 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101265 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0469afa-8902-418c-8545-f5ad9ed2b86b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101291 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3f83673-a3d7-4c35-9935-4a0a505732ed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101315 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.101357 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.601343543 +0000 UTC m=+34.672123319 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101462 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a801ac-98e6-4a11-b743-ed43c228ce05-config\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101578 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101609 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35224190-488f-497f-9ac7-2d37339c9b71-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101797 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrnsx\" (UniqueName: \"kubernetes.io/projected/0f6378ba-bda0-4e50-a126-29c23361a240-kube-api-access-mrnsx\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101822 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-dir\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101847 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcskk\" (UniqueName: \"kubernetes.io/projected/d393854b-5612-4686-8a9f-c6820b3eee5f-kube-api-access-vcskk\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101877 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fb89c3c-e4d1-4365-bff9-d2d010030605-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.101934 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gpkd\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-kube-api-access-6gpkd\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102114 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34a801ac-98e6-4a11-b743-ed43c228ce05-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102176 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34a801ac-98e6-4a11-b743-ed43c228ce05-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102270 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102312 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6393ba4f-7df4-46a1-9156-42002708133c-serving-cert\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102338 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-encryption-config\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102359 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102389 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-stats-auth\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " 
pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102414 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0469afa-8902-418c-8545-f5ad9ed2b86b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102437 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102470 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-trusted-ca\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102506 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-serving-cert\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102541 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-config\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102569 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4gcs\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102587 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102606 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w48x9\" (UniqueName: \"kubernetes.io/projected/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-kube-api-access-w48x9\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc 
kubenswrapper[4788]: I1211 09:21:44.102630 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102649 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102671 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102689 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-service-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-client\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102845 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv65l\" (UniqueName: \"kubernetes.io/projected/dcfe9e49-8116-4af5-84db-b958e5d3104b-kube-api-access-wv65l\") pod \"downloads-7954f5f757-dstfn\" (UID: \"dcfe9e49-8116-4af5-84db-b958e5d3104b\") " pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102916 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-trusted-ca\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102959 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.102990 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-proxy-tls\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103136 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103132 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwwrp\" (UniqueName: \"kubernetes.io/projected/6393ba4f-7df4-46a1-9156-42002708133c-kube-api-access-kwwrp\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103286 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6378ba-bda0-4e50-a126-29c23361a240-serving-cert\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103315 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f83673-a3d7-4c35-9935-4a0a505732ed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103405 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103430 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxfxb\" (UniqueName: \"kubernetes.io/projected/efb6f672-100b-4da6-951e-7fd6326353a6-kube-api-access-gxfxb\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103457 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69sj6\" (UniqueName: \"kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103487 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-metrics-certs\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103511 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-client\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103539 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103582 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkbrf\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-kube-api-access-mkbrf\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103628 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103652 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wlbm\" (UniqueName: \"kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103675 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35224190-488f-497f-9ac7-2d37339c9b71-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103718 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-config\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103747 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b39d101c-fbdd-427c-9369-cbfde9bb50cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103773 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103800 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103823 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-268fj\" (UniqueName: \"kubernetes.io/projected/35224190-488f-497f-9ac7-2d37339c9b71-kube-api-access-268fj\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.103851 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104464 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-trusted-ca\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104625 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-default-certificate\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104672 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-service-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104720 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrkww\" (UniqueName: \"kubernetes.io/projected/7e2cf7af-6694-45e9-8d9d-39fce413ba67-kube-api-access-hrkww\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104749 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104777 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-policies\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104830 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq85j\" (UniqueName: \"kubernetes.io/projected/e3f83673-a3d7-4c35-9935-4a0a505732ed-kube-api-access-qq85j\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104876 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-images\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104913 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a71b66-dd01-4895-8b06-98a8f650cabc-service-ca-bundle\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.104962 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtmmd\" (UniqueName: \"kubernetes.io/projected/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-kube-api-access-qtmmd\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105016 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-serving-cert\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105074 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105095 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6378ba-bda0-4e50-a126-29c23361a240-config\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105126 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105296 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105412 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.105438 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.106122 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/90a71b66-dd01-4895-8b06-98a8f650cabc-service-ca-bundle\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.106572 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-stats-auth\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.108393 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls\") pod \"image-registry-697d97f7c8-ll24x\" (UID: 
\"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.109852 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-default-certificate\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.109990 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/90a71b66-dd01-4895-8b06-98a8f650cabc-metrics-certs\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.110711 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.111845 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.111861 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.113528 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6378ba-bda0-4e50-a126-29c23361a240-serving-cert\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.138294 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtg9d\" (UniqueName: \"kubernetes.io/projected/90a71b66-dd01-4895-8b06-98a8f650cabc-kube-api-access-xtg9d\") pod \"router-default-5444994796-6xr2p\" (UID: \"90a71b66-dd01-4895-8b06-98a8f650cabc\") " pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.160802 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.181011 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrnsx\" (UniqueName: 
\"kubernetes.io/projected/0f6378ba-bda0-4e50-a126-29c23361a240-kube-api-access-mrnsx\") pod \"console-operator-58897d9998-gstnh\" (UID: \"0f6378ba-bda0-4e50-a126-29c23361a240\") " pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.209714 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4gcs\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.210495 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.71047157 +0000 UTC m=+34.781251156 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.210448 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212362 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtmmd\" (UniqueName: \"kubernetes.io/projected/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-kube-api-access-qtmmd\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212408 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-node-bootstrap-token\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-serving-cert\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212467 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92gl9\" (UniqueName: \"kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: 
\"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212485 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9f7c\" (UniqueName: \"kubernetes.io/projected/903b0a02-6ac7-420c-9ba5-3d9562d57168-kube-api-access-r9f7c\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212505 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212524 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212542 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212563 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03fbacdb-e497-4e45-af5d-04099435b951-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212583 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212599 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcpqk\" (UniqueName: \"kubernetes.io/projected/b39d101c-fbdd-427c-9369-cbfde9bb50cd-kube-api-access-bcpqk\") pod \"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212619 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-proxy-tls\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212635 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-csi-data-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212663 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212679 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkvsg\" (UniqueName: \"kubernetes.io/projected/8b8e3455-a9d8-480a-b829-665bb41d5bb9-kube-api-access-rkvsg\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212695 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-mountpoint-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212710 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-srv-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212727 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212743 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212762 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc 
kubenswrapper[4788]: I1211 09:21:44.212795 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4fb89c3c-e4d1-4365-bff9-d2d010030605-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212812 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-apiservice-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212830 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212846 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212873 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-cabundle\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212893 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c12131a2-d938-4bab-a3f2-b82205ea5a1b-serving-cert\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212910 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgf6w\" (UniqueName: \"kubernetes.io/projected/c12131a2-d938-4bab-a3f2-b82205ea5a1b-kube-api-access-mgf6w\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212931 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212950 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c12131a2-d938-4bab-a3f2-b82205ea5a1b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212969 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.212989 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213140 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d393854b-5612-4686-8a9f-c6820b3eee5f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213169 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213190 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213214 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213255 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w77px\" (UniqueName: \"kubernetes.io/projected/c0469afa-8902-418c-8545-f5ad9ed2b86b-kube-api-access-w77px\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213277 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213299 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213318 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6806d62e-080e-4c44-a25e-abfd4baa858e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" (UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213315 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213335 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-metrics-tls\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213352 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c35c93ec-8f41-4ded-b3ba-d2533e13de17-cert\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213368 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213385 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213401 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm7cw\" (UniqueName: \"kubernetes.io/projected/6806d62e-080e-4c44-a25e-abfd4baa858e-kube-api-access-cm7cw\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" 
(UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213549 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213575 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-config\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213596 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213614 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-config\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213640 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0469afa-8902-418c-8545-f5ad9ed2b86b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213685 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3f83673-a3d7-4c35-9935-4a0a505732ed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213708 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213741 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213762 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a801ac-98e6-4a11-b743-ed43c228ce05-config\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213799 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213823 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35224190-488f-497f-9ac7-2d37339c9b71-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.213849 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-metrics-tls\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.214602 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.215286 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.215585 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.715567028 +0000 UTC m=+34.786346614 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216364 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216759 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216756 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34a801ac-98e6-4a11-b743-ed43c228ce05-config\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216823 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fb89c3c-e4d1-4365-bff9-d2d010030605-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216851 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gpkd\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-kube-api-access-6gpkd\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216878 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-dir\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216899 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcskk\" (UniqueName: \"kubernetes.io/projected/d393854b-5612-4686-8a9f-c6820b3eee5f-kube-api-access-vcskk\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216922 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/34a801ac-98e6-4a11-b743-ed43c228ce05-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216941 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34a801ac-98e6-4a11-b743-ed43c228ce05-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.217189 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35224190-488f-497f-9ac7-2d37339c9b71-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.217425 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6806d62e-080e-4c44-a25e-abfd4baa858e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" (UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.217877 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4fb89c3c-e4d1-4365-bff9-d2d010030605-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.216806 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.217980 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218026 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-dir\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218068 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-service-ca\") pod 
\"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218119 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-config\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218648 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218880 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218913 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218960 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6393ba4f-7df4-46a1-9156-42002708133c-serving-cert\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.218987 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-encryption-config\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219083 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0469afa-8902-418c-8545-f5ad9ed2b86b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219112 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-webhook-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219141 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219165 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-plugins-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219190 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j55zz\" (UniqueName: \"kubernetes.io/projected/09f0b73c-84d0-4165-8bef-45d954fcad4c-kube-api-access-j55zz\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219212 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szpk5\" (UniqueName: \"kubernetes.io/projected/8549231d-3a38-4403-aa63-e8f9090db6ef-kube-api-access-szpk5\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219315 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219348 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-trusted-ca\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219383 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk9v6\" (UniqueName: \"kubernetes.io/projected/b3670533-6639-4b4f-88f4-15ebb2f2b8be-kube-api-access-nk9v6\") pod \"migrator-59844c95c7-xd6tv\" (UID: \"b3670533-6639-4b4f-88f4-15ebb2f2b8be\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219414 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-registration-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " 
pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219521 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03fbacdb-e497-4e45-af5d-04099435b951-config\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219553 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-serving-cert\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219579 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-config\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219602 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219649 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w48x9\" (UniqueName: \"kubernetes.io/projected/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-kube-api-access-w48x9\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219674 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c29nr\" (UniqueName: \"kubernetes.io/projected/6fc63325-1134-4106-aa40-35d3ec59fc5a-kube-api-access-c29nr\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219699 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4664\" (UniqueName: \"kubernetes.io/projected/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-kube-api-access-z4664\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219727 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219764 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219794 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-service-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219818 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6ndj\" (UniqueName: \"kubernetes.io/projected/c35c93ec-8f41-4ded-b3ba-d2533e13de17-kube-api-access-k6ndj\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219841 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-config-volume\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219874 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-client\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219898 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv65l\" (UniqueName: \"kubernetes.io/projected/dcfe9e49-8116-4af5-84db-b958e5d3104b-kube-api-access-wv65l\") pod \"downloads-7954f5f757-dstfn\" (UID: \"dcfe9e49-8116-4af5-84db-b958e5d3104b\") " pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219920 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-srv-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219948 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.219974 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-proxy-tls\") pod 
\"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220172 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwwrp\" (UniqueName: \"kubernetes.io/projected/6393ba4f-7df4-46a1-9156-42002708133c-kube-api-access-kwwrp\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220202 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-key\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220248 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f83673-a3d7-4c35-9935-4a0a505732ed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220292 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8549231d-3a38-4403-aa63-e8f9090db6ef-tmpfs\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220321 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ps97\" (UniqueName: \"kubernetes.io/projected/f414d905-192d-42bc-b77c-0d05b5ff8b2f-kube-api-access-6ps97\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220346 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220370 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxfxb\" (UniqueName: \"kubernetes.io/projected/efb6f672-100b-4da6-951e-7fd6326353a6-kube-api-access-gxfxb\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220394 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69sj6\" (UniqueName: \"kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220417 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-client\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220440 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220461 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-socket-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220483 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-profile-collector-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220501 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-serving-cert\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220524 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8x64\" (UniqueName: \"kubernetes.io/projected/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-kube-api-access-s8x64\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220549 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkbrf\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-kube-api-access-mkbrf\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220574 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcwqj\" (UniqueName: \"kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc 
kubenswrapper[4788]: I1211 09:21:44.220598 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220624 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wlbm\" (UniqueName: \"kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220651 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35224190-488f-497f-9ac7-2d37339c9b71-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220674 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/09f0b73c-84d0-4165-8bef-45d954fcad4c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220694 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220723 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b39d101c-fbdd-427c-9369-cbfde9bb50cd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220747 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f523f4a-d652-40cc-9613-5058118a5c79-metrics-tls\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220870 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3f83673-a3d7-4c35-9935-4a0a505732ed-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220962 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.220990 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmg22\" (UniqueName: \"kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221015 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-268fj\" (UniqueName: \"kubernetes.io/projected/35224190-488f-497f-9ac7-2d37339c9b71-kube-api-access-268fj\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221038 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221061 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03fbacdb-e497-4e45-af5d-04099435b951-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221084 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-certs\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221105 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m55t2\" (UniqueName: \"kubernetes.io/projected/b9fcc600-c510-4fc7-a539-02ff3f235ee4-kube-api-access-m55t2\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221135 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-service-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: 
\"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221166 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrkww\" (UniqueName: \"kubernetes.io/projected/7e2cf7af-6694-45e9-8d9d-39fce413ba67-kube-api-access-hrkww\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221194 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221249 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-policies\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221276 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221302 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq85j\" (UniqueName: \"kubernetes.io/projected/e3f83673-a3d7-4c35-9935-4a0a505732ed-kube-api-access-qq85j\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221328 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-images\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.236648 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvfmz\" (UniqueName: \"kubernetes.io/projected/8f523f4a-d652-40cc-9613-5058118a5c79-kube-api-access-xvfmz\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.222917 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34a801ac-98e6-4a11-b743-ed43c228ce05-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 
09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.223266 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c12131a2-d938-4bab-a3f2-b82205ea5a1b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.223290 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.224647 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-metrics-tls\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.225770 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-serving-cert\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.226558 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.226997 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4fb89c3c-e4d1-4365-bff9-d2d010030605-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.228085 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.229310 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-service-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.231982 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.232522 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.232534 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.232979 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-proxy-tls\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.233494 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c0469afa-8902-418c-8545-f5ad9ed2b86b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.233813 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.234222 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-config\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.234729 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e2cf7af-6694-45e9-8d9d-39fce413ba67-audit-policies\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.235005 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-service-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 
09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.235259 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35224190-488f-497f-9ac7-2d37339c9b71-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.235263 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6393ba4f-7df4-46a1-9156-42002708133c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.235275 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.236111 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-trusted-ca\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.221942 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.237681 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c12131a2-d938-4bab-a3f2-b82205ea5a1b-serving-cert\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.235826 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-ca\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.238597 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-etcd-client\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.238817 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-images\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.239131 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.239942 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d393854b-5612-4686-8a9f-c6820b3eee5f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.240330 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efb6f672-100b-4da6-951e-7fd6326353a6-serving-cert\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.243643 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.243729 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-proxy-tls\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.247712 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.247898 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3f83673-a3d7-4c35-9935-4a0a505732ed-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.248502 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/b39d101c-fbdd-427c-9369-cbfde9bb50cd-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.249778 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.250411 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-etcd-client\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.253235 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7e2cf7af-6694-45e9-8d9d-39fce413ba67-encryption-config\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.253922 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.254020 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6393ba4f-7df4-46a1-9156-42002708133c-serving-cert\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.258564 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0469afa-8902-418c-8545-f5ad9ed2b86b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.263200 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.264466 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: 
I1211 09:21:44.277342 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtmmd\" (UniqueName: \"kubernetes.io/projected/a71f8b6c-2868-4df6-bafe-a1dc5a0a751e-kube-api-access-qtmmd\") pod \"machine-config-controller-84d6567774-v858r\" (UID: \"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.280495 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.293943 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcpqk\" (UniqueName: \"kubernetes.io/projected/b39d101c-fbdd-427c-9369-cbfde9bb50cd-kube-api-access-bcpqk\") pod \"control-plane-machine-set-operator-78cbb6b69f-5ztq4\" (UID: \"b39d101c-fbdd-427c-9369-cbfde9bb50cd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: W1211 09:21:44.298421 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90a71b66_dd01_4895_8b06_98a8f650cabc.slice/crio-fa26223ad27ac3ef120b3645d80245f03cdb7ed60e83555f7ae0ef2d07ae899c WatchSource:0}: Error finding container fa26223ad27ac3ef120b3645d80245f03cdb7ed60e83555f7ae0ef2d07ae899c: Status 404 returned error can't find the container with id fa26223ad27ac3ef120b3645d80245f03cdb7ed60e83555f7ae0ef2d07ae899c Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.306851 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.316680 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm7cw\" (UniqueName: \"kubernetes.io/projected/6806d62e-080e-4c44-a25e-abfd4baa858e-kube-api-access-cm7cw\") pod \"cluster-samples-operator-665b6dd947-z2r9q\" (UID: \"6806d62e-080e-4c44-a25e-abfd4baa858e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.318121 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w77px\" (UniqueName: \"kubernetes.io/projected/c0469afa-8902-418c-8545-f5ad9ed2b86b-kube-api-access-w77px\") pod \"openshift-apiserver-operator-796bbdcf4f-sj54m\" (UID: \"c0469afa-8902-418c-8545-f5ad9ed2b86b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.343940 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.344311 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 
crc kubenswrapper[4788]: E1211 09:21:44.344689 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.844654723 +0000 UTC m=+34.915434319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351285 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-webhook-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351645 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351674 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-plugins-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351701 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j55zz\" (UniqueName: \"kubernetes.io/projected/09f0b73c-84d0-4165-8bef-45d954fcad4c-kube-api-access-j55zz\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351759 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szpk5\" (UniqueName: \"kubernetes.io/projected/8549231d-3a38-4403-aa63-e8f9090db6ef-kube-api-access-szpk5\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351793 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk9v6\" (UniqueName: \"kubernetes.io/projected/b3670533-6639-4b4f-88f4-15ebb2f2b8be-kube-api-access-nk9v6\") pod \"migrator-59844c95c7-xd6tv\" (UID: \"b3670533-6639-4b4f-88f4-15ebb2f2b8be\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351818 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-registration-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351840 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03fbacdb-e497-4e45-af5d-04099435b951-config\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351921 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c29nr\" (UniqueName: \"kubernetes.io/projected/6fc63325-1134-4106-aa40-35d3ec59fc5a-kube-api-access-c29nr\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351946 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4664\" (UniqueName: \"kubernetes.io/projected/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-kube-api-access-z4664\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351974 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6ndj\" (UniqueName: \"kubernetes.io/projected/c35c93ec-8f41-4ded-b3ba-d2533e13de17-kube-api-access-k6ndj\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351995 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-config-volume\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.351989 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-webhook-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.353151 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.353375 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-registration-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc 
kubenswrapper[4788]: I1211 09:21:44.353413 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.353555 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-plugins-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354206 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-config-volume\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354314 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-srv-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354364 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-key\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354393 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8549231d-3a38-4403-aa63-e8f9090db6ef-tmpfs\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354429 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ps97\" (UniqueName: \"kubernetes.io/projected/f414d905-192d-42bc-b77c-0d05b5ff8b2f-kube-api-access-6ps97\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354470 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-socket-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354496 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-profile-collector-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354519 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-serving-cert\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354569 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8x64\" (UniqueName: \"kubernetes.io/projected/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-kube-api-access-s8x64\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354607 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcwqj\" (UniqueName: \"kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354671 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/09f0b73c-84d0-4165-8bef-45d954fcad4c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354703 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f523f4a-d652-40cc-9613-5058118a5c79-metrics-tls\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354723 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354748 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmg22\" (UniqueName: \"kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354782 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03fbacdb-e497-4e45-af5d-04099435b951-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 
09:21:44.354809 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-certs\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354833 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m55t2\" (UniqueName: \"kubernetes.io/projected/b9fcc600-c510-4fc7-a539-02ff3f235ee4-kube-api-access-m55t2\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354868 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354900 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvfmz\" (UniqueName: \"kubernetes.io/projected/8f523f4a-d652-40cc-9613-5058118a5c79-kube-api-access-xvfmz\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.354941 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-node-bootstrap-token\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355288 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92gl9\" (UniqueName: \"kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355329 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03fbacdb-e497-4e45-af5d-04099435b951-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355356 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9f7c\" (UniqueName: \"kubernetes.io/projected/903b0a02-6ac7-420c-9ba5-3d9562d57168-kube-api-access-r9f7c\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355384 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-csi-data-dir\") pod 
\"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355412 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkvsg\" (UniqueName: \"kubernetes.io/projected/8b8e3455-a9d8-480a-b829-665bb41d5bb9-kube-api-access-rkvsg\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355436 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-mountpoint-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355459 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-srv-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355483 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355507 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-apiservice-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355530 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355665 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03fbacdb-e497-4e45-af5d-04099435b951-config\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.355824 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-socket-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357021 4788 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-apiserver/apiserver-76f77b778f-mg2kx"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357337 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-cabundle\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357388 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357425 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357457 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357485 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c35c93ec-8f41-4ded-b3ba-d2533e13de17-cert\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357505 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357530 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357553 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-config\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357581 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.357607 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-metrics-tls\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.363153 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.363980 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.364000 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.365459 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.365645 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-profile-collector-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.366055 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-cabundle\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.366214 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-mountpoint-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.366356 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b9fcc600-c510-4fc7-a539-02ff3f235ee4-csi-data-dir\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.366663 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.866648023 +0000 UTC m=+34.937427609 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.366807 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.367803 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.368309 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f523f4a-d652-40cc-9613-5058118a5c79-metrics-tls\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.369106 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-config\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.369819 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8549231d-3a38-4403-aa63-e8f9090db6ef-tmpfs\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.370651 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/903b0a02-6ac7-420c-9ba5-3d9562d57168-srv-cert\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 
09:21:44.372154 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-metrics-tls\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.372321 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-serving-cert\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.375137 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8b8e3455-a9d8-480a-b829-665bb41d5bb9-srv-cert\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.376139 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gpkd\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-kube-api-access-6gpkd\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.377273 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6fc63325-1134-4106-aa40-35d3ec59fc5a-signing-key\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.382253 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/09f0b73c-84d0-4165-8bef-45d954fcad4c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.384782 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8549231d-3a38-4403-aa63-e8f9090db6ef-apiservice-cert\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.389174 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03fbacdb-e497-4e45-af5d-04099435b951-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.390435 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-node-bootstrap-token\") pod \"machine-config-server-c9cxs\" (UID: 
\"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.390793 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f414d905-192d-42bc-b77c-0d05b5ff8b2f-certs\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.391636 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c35c93ec-8f41-4ded-b3ba-d2533e13de17-cert\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.391901 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.392537 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.393678 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcskk\" (UniqueName: \"kubernetes.io/projected/d393854b-5612-4686-8a9f-c6820b3eee5f-kube-api-access-vcskk\") pod \"multus-admission-controller-857f4d67dd-tlz6t\" (UID: \"d393854b-5612-4686-8a9f-c6820b3eee5f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.403889 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.405853 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-47nzd"] Dec 11 09:21:44 crc kubenswrapper[4788]: W1211 09:21:44.408661 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0783fcec_bf1b_4910_b8c2_08d85c53093a.slice/crio-a321e1ec8c06a10cc86eaeae042721055c6989f9170a50d7c16f32a3ccd1cd8e WatchSource:0}: Error finding container a321e1ec8c06a10cc86eaeae042721055c6989f9170a50d7c16f32a3ccd1cd8e: Status 404 returned error can't find the container with id a321e1ec8c06a10cc86eaeae042721055c6989f9170a50d7c16f32a3ccd1cd8e Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.410726 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/34a801ac-98e6-4a11-b743-ed43c228ce05-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kr56t\" (UID: \"34a801ac-98e6-4a11-b743-ed43c228ce05\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.419555 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.422609 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.422856 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgf6w\" (UniqueName: \"kubernetes.io/projected/c12131a2-d938-4bab-a3f2-b82205ea5a1b-kube-api-access-mgf6w\") pod \"openshift-config-operator-7777fb866f-7t5gz\" (UID: \"c12131a2-d938-4bab-a3f2-b82205ea5a1b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: W1211 09:21:44.424473 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd4a1f90_9ac0_41cc_b980_91964f48715d.slice/crio-303a2064f0090d8bc000b0181f0d6abd6952fcb1dcc614905e8697b60d9608bd WatchSource:0}: Error finding container 303a2064f0090d8bc000b0181f0d6abd6952fcb1dcc614905e8697b60d9608bd: Status 404 returned error can't find the container with id 303a2064f0090d8bc000b0181f0d6abd6952fcb1dcc614905e8697b60d9608bd Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.447117 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.456536 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9dc8716-98a0-4154-b72b-24fac0d3ebf1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-cwpbz\" (UID: \"d9dc8716-98a0-4154-b72b-24fac0d3ebf1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.458144 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.458892 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:44.958870987 +0000 UTC m=+35.029650573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.459839 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.462294 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkbrf\" (UniqueName: \"kubernetes.io/projected/4fb89c3c-e4d1-4365-bff9-d2d010030605-kube-api-access-mkbrf\") pod \"cluster-image-registry-operator-dc59b4c8b-rkcgk\" (UID: \"4fb89c3c-e4d1-4365-bff9-d2d010030605\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.483889 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrkww\" (UniqueName: \"kubernetes.io/projected/7e2cf7af-6694-45e9-8d9d-39fce413ba67-kube-api-access-hrkww\") pod \"apiserver-7bbb656c7d-rt2ms\" (UID: \"7e2cf7af-6694-45e9-8d9d-39fce413ba67\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: W1211 09:21:44.494685 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d7342e7_facb_49b4_adee_0e6e25c9fa8e.slice/crio-2d967030b3c8c9b46c87e3ddac89586e700c60edad3cf24e71ed01c8e6cc41e4 WatchSource:0}: Error finding container 2d967030b3c8c9b46c87e3ddac89586e700c60edad3cf24e71ed01c8e6cc41e4: Status 404 returned error can't find the container with id 2d967030b3c8c9b46c87e3ddac89586e700c60edad3cf24e71ed01c8e6cc41e4 Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.504031 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxfxb\" (UniqueName: 
\"kubernetes.io/projected/efb6f672-100b-4da6-951e-7fd6326353a6-kube-api-access-gxfxb\") pod \"etcd-operator-b45778765-9n777\" (UID: \"efb6f672-100b-4da6-951e-7fd6326353a6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.521679 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.536845 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.546162 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wlbm\" (UniqueName: \"kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm\") pod \"oauth-openshift-558db77b4-zbjxm\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.558406 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.559374 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.559890 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.059857931 +0000 UTC m=+35.130637697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.561302 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-268fj\" (UniqueName: \"kubernetes.io/projected/35224190-488f-497f-9ac7-2d37339c9b71-kube-api-access-268fj\") pod \"kube-storage-version-migrator-operator-b67b599dd-52mwx\" (UID: \"35224190-488f-497f-9ac7-2d37339c9b71\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.569554 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.574655 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.581584 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv65l\" (UniqueName: \"kubernetes.io/projected/dcfe9e49-8116-4af5-84db-b958e5d3104b-kube-api-access-wv65l\") pod \"downloads-7954f5f757-dstfn\" (UID: \"dcfe9e49-8116-4af5-84db-b958e5d3104b\") " pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.587785 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-v858r"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.589667 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.598320 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.615654 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.618068 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.621744 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w48x9\" (UniqueName: \"kubernetes.io/projected/2ef5dd28-6a46-4364-9c71-56ddbb25c0dd-kube-api-access-w48x9\") pod \"machine-config-operator-74547568cd-fsmmk\" (UID: \"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.642451 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwwrp\" (UniqueName: \"kubernetes.io/projected/6393ba4f-7df4-46a1-9156-42002708133c-kube-api-access-kwwrp\") pod \"authentication-operator-69f744f599-xm9p5\" (UID: \"6393ba4f-7df4-46a1-9156-42002708133c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.659858 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.660281 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.660783 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.160766302 +0000 UTC m=+35.231545888 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.668315 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq85j\" (UniqueName: \"kubernetes.io/projected/e3f83673-a3d7-4c35-9935-4a0a505732ed-kube-api-access-qq85j\") pod \"openshift-controller-manager-operator-756b6f6bc6-fvpdj\" (UID: \"e3f83673-a3d7-4c35-9935-4a0a505732ed\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.705331 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c29nr\" (UniqueName: \"kubernetes.io/projected/6fc63325-1134-4106-aa40-35d3ec59fc5a-kube-api-access-c29nr\") pod \"service-ca-9c57cc56f-fndxq\" (UID: \"6fc63325-1134-4106-aa40-35d3ec59fc5a\") " pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.725676 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gstnh"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.726185 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4664\" (UniqueName: \"kubernetes.io/projected/3e7837c9-fcd6-4c09-bc3c-f361ce835fce-kube-api-access-z4664\") pod \"service-ca-operator-777779d784-vqzqt\" (UID: \"3e7837c9-fcd6-4c09-bc3c-f361ce835fce\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.750567 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6ndj\" (UniqueName: \"kubernetes.io/projected/c35c93ec-8f41-4ded-b3ba-d2533e13de17-kube-api-access-k6ndj\") pod \"ingress-canary-khc6m\" (UID: \"c35c93ec-8f41-4ded-b3ba-d2533e13de17\") " pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.755212 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.758065 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.759068 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j55zz\" (UniqueName: \"kubernetes.io/projected/09f0b73c-84d0-4165-8bef-45d954fcad4c-kube-api-access-j55zz\") pod \"package-server-manager-789f6589d5-598q7\" (UID: \"09f0b73c-84d0-4165-8bef-45d954fcad4c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.762443 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.763187 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.263165351 +0000 UTC m=+35.333944987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.772728 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.778698 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fdaa8c11-2c38-410e-a3f8-c81bbccf25ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8ptnh\" (UID: \"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.789001 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.790981 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-khc6m" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.800496 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szpk5\" (UniqueName: \"kubernetes.io/projected/8549231d-3a38-4403-aa63-e8f9090db6ef-kube-api-access-szpk5\") pod \"packageserver-d55dfcdfc-nqszp\" (UID: \"8549231d-3a38-4403-aa63-e8f9090db6ef\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.806581 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.818984 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcwqj\" (UniqueName: \"kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj\") pod \"collect-profiles-29424075-kgnps\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.826813 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.850999 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.863317 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmg22\" (UniqueName: \"kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22\") pod \"marketplace-operator-79b997595-nmcnl\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.863881 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.863985 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.36396291 +0000 UTC m=+35.434742506 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.864180 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.864602 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.364587805 +0000 UTC m=+35.435367401 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.879718 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk9v6\" (UniqueName: \"kubernetes.io/projected/b3670533-6639-4b4f-88f4-15ebb2f2b8be-kube-api-access-nk9v6\") pod \"migrator-59844c95c7-xd6tv\" (UID: \"b3670533-6639-4b4f-88f4-15ebb2f2b8be\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.898973 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m55t2\" (UniqueName: \"kubernetes.io/projected/b9fcc600-c510-4fc7-a539-02ff3f235ee4-kube-api-access-m55t2\") pod \"csi-hostpathplugin-2s9b9\" (UID: \"b9fcc600-c510-4fc7-a539-02ff3f235ee4\") " pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.922254 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92gl9\" (UniqueName: \"kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9\") pod \"cni-sysctl-allowlist-ds-snc68\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.922612 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.939870 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.946969 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/03fbacdb-e497-4e45-af5d-04099435b951-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-wpxdt\" (UID: \"03fbacdb-e497-4e45-af5d-04099435b951\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.958210 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.965428 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.965608 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 09:21:45.465573799 +0000 UTC m=+35.536353385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.965839 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.965987 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9f7c\" (UniqueName: \"kubernetes.io/projected/903b0a02-6ac7-420c-9ba5-3d9562d57168-kube-api-access-r9f7c\") pod \"catalog-operator-68c6474976-9zzr5\" (UID: \"903b0a02-6ac7-420c-9ba5-3d9562d57168\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:44 crc kubenswrapper[4788]: E1211 09:21:44.966307 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.466288596 +0000 UTC m=+35.537068182 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.966669 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.980301 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkvsg\" (UniqueName: \"kubernetes.io/projected/8b8e3455-a9d8-480a-b829-665bb41d5bb9-kube-api-access-rkvsg\") pod \"olm-operator-6b444d44fb-k6hvj\" (UID: \"8b8e3455-a9d8-480a-b829-665bb41d5bb9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.984021 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz"] Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.992872 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:44 crc kubenswrapper[4788]: I1211 09:21:44.997329 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvfmz\" (UniqueName: \"kubernetes.io/projected/8f523f4a-d652-40cc-9613-5058118a5c79-kube-api-access-xvfmz\") pod \"dns-operator-744455d44c-c7fld\" (UID: \"8f523f4a-d652-40cc-9613-5058118a5c79\") " pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.000183 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.010718 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.017113 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.017418 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8x64\" (UniqueName: \"kubernetes.io/projected/7ce8b184-b7bf-4e9e-96e5-b8a116493df7-kube-api-access-s8x64\") pod \"dns-default-7kz7q\" (UID: \"7ce8b184-b7bf-4e9e-96e5-b8a116493df7\") " pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.029595 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.066710 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.066880 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.566851589 +0000 UTC m=+35.637631185 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.067092 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.067748 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.567717531 +0000 UTC m=+35.638497297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.085082 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.098959 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.111331 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69sj6\" (UniqueName: \"kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6\") pod \"controller-manager-879f6c89f-z97fq\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.114069 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.118947 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g574q\" (UID: \"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.125098 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" event={"ID":"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a","Type":"ContainerStarted","Data":"e98ffba9d03a7805400f7e243369cb00863143ac74136c74296f43e919b144d9"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.126317 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ps97\" (UniqueName: \"kubernetes.io/projected/f414d905-192d-42bc-b77c-0d05b5ff8b2f-kube-api-access-6ps97\") pod \"machine-config-server-c9cxs\" (UID: \"f414d905-192d-42bc-b77c-0d05b5ff8b2f\") " pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.127734 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" event={"ID":"cd4a1f90-9ac0-41cc-b980-91964f48715d","Type":"ContainerStarted","Data":"303a2064f0090d8bc000b0181f0d6abd6952fcb1dcc614905e8697b60d9608bd"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.129213 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6xr2p" event={"ID":"90a71b66-dd01-4895-8b06-98a8f650cabc","Type":"ContainerStarted","Data":"fa26223ad27ac3ef120b3645d80245f03cdb7ed60e83555f7ae0ef2d07ae899c"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.136417 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" event={"ID":"19c4e640-d1ed-47db-83f6-4e4656b7138b","Type":"ContainerStarted","Data":"da38d7042ce0cf9746c7a510a5bac5486bb0a4cf85199ee3989f463df6684a87"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.137779 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" event={"ID":"0783fcec-bf1b-4910-b8c2-08d85c53093a","Type":"ContainerStarted","Data":"a321e1ec8c06a10cc86eaeae042721055c6989f9170a50d7c16f32a3ccd1cd8e"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.139875 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" event={"ID":"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e","Type":"ContainerStarted","Data":"e3960a8c985f4ab5b486149884f156bd32bb7f5ce623bbcee2b7056c1bc505dd"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.140913 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jdvlj" event={"ID":"2d7342e7-facb-49b4-adee-0e6e25c9fa8e","Type":"ContainerStarted","Data":"2d967030b3c8c9b46c87e3ddac89586e700c60edad3cf24e71ed01c8e6cc41e4"} Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.168824 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.169373 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.66935035 +0000 UTC m=+35.740129936 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.206452 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc12131a2_d938_4bab_a3f2_b82205ea5a1b.slice/crio-69252588245d1ca364f868c9e76b5fcadb21b2b54b8391f5deb476ac51a190df WatchSource:0}: Error finding container 69252588245d1ca364f868c9e76b5fcadb21b2b54b8391f5deb476ac51a190df: Status 404 returned error can't find the container with id 69252588245d1ca364f868c9e76b5fcadb21b2b54b8391f5deb476ac51a190df Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.231892 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.249862 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.271131 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.271527 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.771508742 +0000 UTC m=+35.842288328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.279172 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.284358 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c9cxs" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.326598 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.358677 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.359289 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tlz6t"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.372503 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.372891 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.872870125 +0000 UTC m=+35.943649711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.389709 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.475068 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.475270 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.475367 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.475934 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:45.97591794 +0000 UTC m=+36.046697526 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.478439 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.478804 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.492298 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.508696 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd393854b_5612_4686_8a9f_c6820b3eee5f.slice/crio-f8fd690998d2d614f1240e6d5c670c32cf88462d318010d11b3f070c1f41a764 WatchSource:0}: Error finding container f8fd690998d2d614f1240e6d5c670c32cf88462d318010d11b3f070c1f41a764: Status 404 returned error can't find the container with id f8fd690998d2d614f1240e6d5c670c32cf88462d318010d11b3f070c1f41a764 Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.529200 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0469afa_8902_418c_8545_f5ad9ed2b86b.slice/crio-9434d8921e513eb288d800de625d938ad474fb3b4fcf17185ba679be1315a537 WatchSource:0}: Error finding container 9434d8921e513eb288d800de625d938ad474fb3b4fcf17185ba679be1315a537: Status 404 returned error can't find the container with id 9434d8921e513eb288d800de625d938ad474fb3b4fcf17185ba679be1315a537 Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.549531 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms"] Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.550999 4788 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35224190_488f_497f_9ac7_2d37339c9b71.slice/crio-0c6d6a93f9877bee5d848b3e46fdc3232041557786c85165b7711e1476131895 WatchSource:0}: Error finding container 0c6d6a93f9877bee5d848b3e46fdc3232041557786c85165b7711e1476131895: Status 404 returned error can't find the container with id 0c6d6a93f9877bee5d848b3e46fdc3232041557786c85165b7711e1476131895 Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.576672 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.576843 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.576982 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.076952065 +0000 UTC m=+36.147731651 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.578316 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.578491 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.579693 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.079677293 +0000 UTC m=+36.150456929 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.582881 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.586068 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.638615 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.676776 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.681387 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.681728 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.181711202 +0000 UTC m=+36.252490788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.681750 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9n777"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.724631 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.724638 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.730786 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.744353 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dstfn"] Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.782468 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.783041 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.283022974 +0000 UTC m=+36.353802560 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.839607 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb39d101c_fbdd_427c_9369_cbfde9bb50cd.slice/crio-67e3955e350d7fa83710f2ed62c256a6b192237f8860ff57ddbb3a35437b013e WatchSource:0}: Error finding container 67e3955e350d7fa83710f2ed62c256a6b192237f8860ff57ddbb3a35437b013e: Status 404 returned error can't find the container with id 67e3955e350d7fa83710f2ed62c256a6b192237f8860ff57ddbb3a35437b013e Dec 11 09:21:45 crc kubenswrapper[4788]: W1211 09:21:45.850802 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcfe9e49_8116_4af5_84db_b958e5d3104b.slice/crio-322c79df6a86fcfa5abbb92118d4535a4d502980ca640e87869e12431848dab7 WatchSource:0}: Error finding container 322c79df6a86fcfa5abbb92118d4535a4d502980ca640e87869e12431848dab7: Status 404 returned error can't find the container with id 322c79df6a86fcfa5abbb92118d4535a4d502980ca640e87869e12431848dab7 Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.885671 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.886161 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 09:21:46.386140461 +0000 UTC m=+36.456920047 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:45 crc kubenswrapper[4788]: I1211 09:21:45.986983 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:45 crc kubenswrapper[4788]: E1211 09:21:45.987767 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.487749349 +0000 UTC m=+36.558528935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.087822 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.088455 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.588441996 +0000 UTC m=+36.659221582 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.180199 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" event={"ID":"19c4e640-d1ed-47db-83f6-4e4656b7138b","Type":"ContainerStarted","Data":"a806e629c9101350e69b60452903ffb61ea24af3479b36c4d435a3186d85d709"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.190977 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.190992 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" event={"ID":"37cb9609-c336-4c6b-86d5-1d680695f08e","Type":"ContainerStarted","Data":"fd7698abaab57d0fa7cc0a55bcd6e2338ff970a41095f259fd745a6c63305518"} Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.191631 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.691609374 +0000 UTC m=+36.762388960 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.212521 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-snxvv" podStartSLOduration=17.212487745 podStartE2EDuration="17.212487745s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:46.203824499 +0000 UTC m=+36.274604085" watchObservedRunningTime="2025-12-11 09:21:46.212487745 +0000 UTC m=+36.283267331" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.213667 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jdvlj" event={"ID":"2d7342e7-facb-49b4-adee-0e6e25c9fa8e","Type":"ContainerStarted","Data":"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.250097 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-jdvlj" podStartSLOduration=17.250070575 podStartE2EDuration="17.250070575s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:46.245455429 +0000 UTC m=+36.316235035" watchObservedRunningTime="2025-12-11 09:21:46.250070575 +0000 UTC m=+36.320850161" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.256635 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6xr2p" event={"ID":"90a71b66-dd01-4895-8b06-98a8f650cabc","Type":"ContainerStarted","Data":"f072cfd58b038325a26fc1e7b44e7136b71d41a8dd6c332dd75a16fb18c21e4e"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.262441 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-gstnh" event={"ID":"0f6378ba-bda0-4e50-a126-29c23361a240","Type":"ContainerStarted","Data":"5cf744946d7a6cb07a34188fc02254a7e6b0f9f5b06ce2c8574b89d398564934"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.262491 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-gstnh" event={"ID":"0f6378ba-bda0-4e50-a126-29c23361a240","Type":"ContainerStarted","Data":"31a4856d4b1fc17371283c1a4a26d6c6dc15aa1d1db1612b2f248b681cc0d9f1"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.262511 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.266110 4788 patch_prober.go:28] interesting pod/console-operator-58897d9998-gstnh container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 
09:21:46.266170 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-gstnh" podUID="0f6378ba-bda0-4e50-a126-29c23361a240" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.284031 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6xr2p" podStartSLOduration=16.284006523 podStartE2EDuration="16.284006523s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:46.280454504 +0000 UTC m=+36.351234110" watchObservedRunningTime="2025-12-11 09:21:46.284006523 +0000 UTC m=+36.354786109" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.285882 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.285937 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" event={"ID":"cd4a1f90-9ac0-41cc-b980-91964f48715d","Type":"ContainerStarted","Data":"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.286977 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.289278 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" event={"ID":"7e2cf7af-6694-45e9-8d9d-39fce413ba67","Type":"ContainerStarted","Data":"80b92fa0366c287132b3b145da0e81814562fd66766db6ff8ad906fe387f1d63"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.290112 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" event={"ID":"d9dc8716-98a0-4154-b72b-24fac0d3ebf1","Type":"ContainerStarted","Data":"50cf0c112f9dd2fee7250992e504d0d680f6fd9e627b1085203465667b55bda7"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.291711 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.293295 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" event={"ID":"c12131a2-d938-4bab-a3f2-b82205ea5a1b","Type":"ContainerStarted","Data":"b9b14d67c366d17fefd600ed81c369d56ec80e2982aff7cc497d7df4d4c50386"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.293320 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" event={"ID":"c12131a2-d938-4bab-a3f2-b82205ea5a1b","Type":"ContainerStarted","Data":"69252588245d1ca364f868c9e76b5fcadb21b2b54b8391f5deb476ac51a190df"} Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.293645 4788 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.793630513 +0000 UTC m=+36.864410099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.295641 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" event={"ID":"0783fcec-bf1b-4910-b8c2-08d85c53093a","Type":"ContainerStarted","Data":"c45ab68415c58f54738a28d9d75b1ac64b1f34e19b74907bd2a8c714897793af"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.299981 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" event={"ID":"6806d62e-080e-4c44-a25e-abfd4baa858e","Type":"ContainerStarted","Data":"61ce0cff0866e646dc189d16ce3646c00b92b220821c9417e4c80e042fa9303d"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.300030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" event={"ID":"6806d62e-080e-4c44-a25e-abfd4baa858e","Type":"ContainerStarted","Data":"10948dd73e1eb9fa2df85e4e560c93bb16594e0e6b35ca337299f63da1cffe38"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.303249 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-gstnh" podStartSLOduration=17.303211372 podStartE2EDuration="17.303211372s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:46.30313098 +0000 UTC m=+36.373910576" watchObservedRunningTime="2025-12-11 09:21:46.303211372 +0000 UTC m=+36.373990958" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.306508 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" event={"ID":"35224190-488f-497f-9ac7-2d37339c9b71","Type":"ContainerStarted","Data":"0c6d6a93f9877bee5d848b3e46fdc3232041557786c85165b7711e1476131895"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.308735 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dstfn" event={"ID":"dcfe9e49-8116-4af5-84db-b958e5d3104b","Type":"ContainerStarted","Data":"322c79df6a86fcfa5abbb92118d4535a4d502980ca640e87869e12431848dab7"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.310163 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" event={"ID":"c0469afa-8902-418c-8545-f5ad9ed2b86b","Type":"ContainerStarted","Data":"9434d8921e513eb288d800de625d938ad474fb3b4fcf17185ba679be1315a537"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.312179 4788 patch_prober.go:28] interesting 
pod/route-controller-manager-6576b87f9c-7qxsh container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.312289 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.315528 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" event={"ID":"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1","Type":"ContainerStarted","Data":"480413980d84e91f7ab57c5b02d5d237af149e4b249382fe55e8ed28deef7dd3"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.317274 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" event={"ID":"b39d101c-fbdd-427c-9369-cbfde9bb50cd","Type":"ContainerStarted","Data":"67e3955e350d7fa83710f2ed62c256a6b192237f8860ff57ddbb3a35437b013e"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.318904 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c9cxs" event={"ID":"f414d905-192d-42bc-b77c-0d05b5ff8b2f","Type":"ContainerStarted","Data":"f239ed461e5e712dc8fdddfb9a65b80eef82c416c9e17e55488cf7c4e32fc0c5"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.323491 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" event={"ID":"efb6f672-100b-4da6-951e-7fd6326353a6","Type":"ContainerStarted","Data":"5ea79c8674be5ad747fd24ac0b23ecc9f38817c67131823134154e31f216220a"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.325257 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" podStartSLOduration=16.325214662 podStartE2EDuration="16.325214662s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:46.322538815 +0000 UTC m=+36.393318411" watchObservedRunningTime="2025-12-11 09:21:46.325214662 +0000 UTC m=+36.395994248" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.332680 4788 generic.go:334] "Generic (PLEG): container finished" podID="93f3b80a-e2ab-4f3c-87e8-806e5110cf9a" containerID="2c7b9832e82f19b33782d8e3e7ff8e019704dd57d71a581784e23344f3de3527" exitCode=0 Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.332745 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" event={"ID":"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a","Type":"ContainerDied","Data":"2c7b9832e82f19b33782d8e3e7ff8e019704dd57d71a581784e23344f3de3527"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.335302 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" 
event={"ID":"d393854b-5612-4686-8a9f-c6820b3eee5f","Type":"ContainerStarted","Data":"f8fd690998d2d614f1240e6d5c670c32cf88462d318010d11b3f070c1f41a764"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.338383 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" event={"ID":"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e","Type":"ContainerStarted","Data":"6ed9b4d07b5092b1bf336e1ee6f9fbfbd21e1afe3946caf4a600ab36119f1ee0"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.339362 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" event={"ID":"34a801ac-98e6-4a11-b743-ed43c228ce05","Type":"ContainerStarted","Data":"a5fd88256f182055d74b1959426920aa077c842c30a16259843578b707c1fe98"} Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.393563 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.397101 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.897084638 +0000 UTC m=+36.967864324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.494641 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.495147 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:46.995127968 +0000 UTC m=+37.065907554 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.521448 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:46 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:46 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:46 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.521513 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.596751 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.597376 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.097363313 +0000 UTC m=+37.168142899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.698584 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.698896 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.198880859 +0000 UTC m=+37.269660445 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.707832 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7kz7q"] Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.722000 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk"] Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.799596 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.800982 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.30096801 +0000 UTC m=+37.371747606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.900967 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.901159 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.401133633 +0000 UTC m=+37.471913219 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:46 crc kubenswrapper[4788]: I1211 09:21:46.901718 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:46 crc kubenswrapper[4788]: E1211 09:21:46.902066 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.402051296 +0000 UTC m=+37.472830882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.007042 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.007713 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.507686706 +0000 UTC m=+37.578466292 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.111477 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.112326 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.61229437 +0000 UTC m=+37.683073956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.191784 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.212524 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.225602 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.7255661 +0000 UTC m=+37.796345696 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.266021 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2s9b9"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.288752 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:47 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:47 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:47 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.288837 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.291669 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.297307 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fndxq"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.297399 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xm9p5"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.300248 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-khc6m"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.310613 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.315497 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.315906 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.815890547 +0000 UTC m=+37.886670143 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.319302 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.326053 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.331249 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.337193 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.338681 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.351849 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" event={"ID":"3e7837c9-fcd6-4c09-bc3c-f361ce835fce","Type":"ContainerStarted","Data":"ddfd7634fb69dfd4950a4a7a2b0e473a7f8e2dc6eb392fa141dab94f246ee820"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.358477 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" event={"ID":"d9dc8716-98a0-4154-b72b-24fac0d3ebf1","Type":"ContainerStarted","Data":"6c0051fc92ce328ded27e9d7e5af2a579d1ec3bd74897a2cf4eefc8acaae7498"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.371355 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.377878 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" event={"ID":"a71f8b6c-2868-4df6-bafe-a1dc5a0a751e","Type":"ContainerStarted","Data":"9633aed940c8b7691c8c84c6c13039452fbbccf999b0a2f779608aba1ca4aaff"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.380090 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c7fld"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.390191 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7kz7q" event={"ID":"7ce8b184-b7bf-4e9e-96e5-b8a116493df7","Type":"ContainerStarted","Data":"908a7a0a5fbed6046152d136d3176977c32390aa3c5ade8b153e9f32391d792e"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.402256 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" event={"ID":"b39d101c-fbdd-427c-9369-cbfde9bb50cd","Type":"ContainerStarted","Data":"5321ddfbb955522c49e7b2b7376613c94ade0a8f6fdde30d99bf2396af3d9aea"} Dec 11 09:21:47 crc 
kubenswrapper[4788]: I1211 09:21:47.427948 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.428422 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:47.928405218 +0000 UTC m=+37.999184804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.431947 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-v858r" podStartSLOduration=17.431928147 podStartE2EDuration="17.431928147s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.402101741 +0000 UTC m=+37.472881327" watchObservedRunningTime="2025-12-11 09:21:47.431928147 +0000 UTC m=+37.502707733" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.433328 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.494123 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" event={"ID":"4fb89c3c-e4d1-4365-bff9-d2d010030605","Type":"ContainerStarted","Data":"dfe76d9ed459e504b78efafae374a067455d9fce8a62520de7c637f10e93544d"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.515545 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" event={"ID":"49f4cfc5-22e9-4bff-9fde-a9a9d14b60f1","Type":"ContainerStarted","Data":"fddf9ca81b1cfa3400b432bdf52d1e46ffb353197481e863c2a35030a4f7d811"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.531662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.531959 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.031947196 +0000 UTC m=+38.102726782 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.562278 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5ztq4" podStartSLOduration=17.562254793 podStartE2EDuration="17.562254793s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.516654694 +0000 UTC m=+37.587434280" watchObservedRunningTime="2025-12-11 09:21:47.562254793 +0000 UTC m=+37.633034369" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.564675 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.581540 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.620873 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c9cxs" event={"ID":"f414d905-192d-42bc-b77c-0d05b5ff8b2f","Type":"ContainerStarted","Data":"8abbfe986a4bb64ee44446b1333883e47bf0e8b6ead1c627aeb6bea93c2471d4"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.642315 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.644011 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.143940514 +0000 UTC m=+38.214720100 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.644105 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g574q" podStartSLOduration=18.644086658 podStartE2EDuration="18.644086658s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.607142435 +0000 UTC m=+37.677922041" watchObservedRunningTime="2025-12-11 09:21:47.644086658 +0000 UTC m=+37.714866244" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.645321 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.650783 4788 generic.go:334] "Generic (PLEG): container finished" podID="c12131a2-d938-4bab-a3f2-b82205ea5a1b" containerID="b9b14d67c366d17fefd600ed81c369d56ec80e2982aff7cc497d7df4d4c50386" exitCode=0 Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.650987 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" event={"ID":"c12131a2-d938-4bab-a3f2-b82205ea5a1b","Type":"ContainerDied","Data":"b9b14d67c366d17fefd600ed81c369d56ec80e2982aff7cc497d7df4d4c50386"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.691179 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" event={"ID":"0783fcec-bf1b-4910-b8c2-08d85c53093a","Type":"ContainerStarted","Data":"62729e05d1bc57b29768164570275910616ae5ea87572be7091544ef3558c0d5"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.699253 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.739740 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" event={"ID":"6806d62e-080e-4c44-a25e-abfd4baa858e","Type":"ContainerStarted","Data":"f4d83bcd435b292fdee3f9bc42864ee6787b4a60da48ce4a01210967fc1192c2"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.756343 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.759165 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.259131543 +0000 UTC m=+38.329911129 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.779831 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" event={"ID":"35224190-488f-497f-9ac7-2d37339c9b71","Type":"ContainerStarted","Data":"06fa542baf82858d2ac4ba555089470d5c7b1523c8f9f6d6fbc6d0e290ed5562"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.799925 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" event={"ID":"c0469afa-8902-418c-8545-f5ad9ed2b86b","Type":"ContainerStarted","Data":"d2583e4e7b0d3733e08be456b5c7c07f3e1336cf79ae862bc572ec4e72324ab0"} Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.830974 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.838699 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-c9cxs" podStartSLOduration=6.83868383 podStartE2EDuration="6.83868383s" podCreationTimestamp="2025-12-11 09:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.838315111 +0000 UTC m=+37.909094687" watchObservedRunningTime="2025-12-11 09:21:47.83868383 +0000 UTC m=+37.909463416" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.839331 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" podStartSLOduration=17.839324106 podStartE2EDuration="17.839324106s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.712546379 +0000 UTC m=+37.783325975" watchObservedRunningTime="2025-12-11 09:21:47.839324106 +0000 UTC m=+37.910103692" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.842671 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-gstnh" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.860599 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.860825 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 09:21:48.360791733 +0000 UTC m=+38.431571319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.864665 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.865107 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.36509601 +0000 UTC m=+38.435875596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.875946 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-47nzd" podStartSLOduration=17.875930431 podStartE2EDuration="17.875930431s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.875256094 +0000 UTC m=+37.946035690" watchObservedRunningTime="2025-12-11 09:21:47.875930431 +0000 UTC m=+37.946710017" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.958247 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-52mwx" podStartSLOduration=17.958212237 podStartE2EDuration="17.958212237s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.956369511 +0000 UTC m=+38.027149117" watchObservedRunningTime="2025-12-11 09:21:47.958212237 +0000 UTC m=+38.028991823" Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.967920 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.968321 4788 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.468295479 +0000 UTC m=+38.539075065 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.968756 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:47 crc kubenswrapper[4788]: E1211 09:21:47.972984 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.472971506 +0000 UTC m=+38.543751092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:47 crc kubenswrapper[4788]: I1211 09:21:47.996432 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-sj54m" podStartSLOduration=18.996406602 podStartE2EDuration="18.996406602s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:47.99594206 +0000 UTC m=+38.066721656" watchObservedRunningTime="2025-12-11 09:21:47.996406602 +0000 UTC m=+38.067186188" Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.050351 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-z2r9q" podStartSLOduration=19.050328469 podStartE2EDuration="19.050328469s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:48.050048152 +0000 UTC m=+38.120827738" watchObservedRunningTime="2025-12-11 09:21:48.050328469 +0000 UTC m=+38.121108055" Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.071062 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.071274 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.571213791 +0000 UTC m=+38.641993377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.071428 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.071745 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.571732314 +0000 UTC m=+38.642511900 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.172375 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.172609 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.672567453 +0000 UTC m=+38.743347039 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.173165 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.177178 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.677158988 +0000 UTC m=+38.747938574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.275621 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.275977 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.775944987 +0000 UTC m=+38.846724583 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.286108 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:48 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:48 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:48 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.286191 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.382471 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.383014 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.882994942 +0000 UTC m=+38.953774528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.486032 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.486397 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:48.986378055 +0000 UTC m=+39.057157641 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.588966 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.589350 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.089336548 +0000 UTC m=+39.160116134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.691500 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.691778 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.191743906 +0000 UTC m=+39.262523492 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.692318 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.692699 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.192685729 +0000 UTC m=+39.263465315 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.793814 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.794112 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.294095713 +0000 UTC m=+39.364875299 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.895089 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:48 crc kubenswrapper[4788]: E1211 09:21:48.895692 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.395674461 +0000 UTC m=+39.466454047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.931534 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" event={"ID":"c12131a2-d938-4bab-a3f2-b82205ea5a1b","Type":"ContainerStarted","Data":"ea59ee245438bf38d1744acfb0f1454435867aaf263202783e448cb95bbc3bab"} Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.931654 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.970745 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" podStartSLOduration=19.970725077 podStartE2EDuration="19.970725077s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:48.968881791 +0000 UTC m=+39.039661397" watchObservedRunningTime="2025-12-11 09:21:48.970725077 +0000 UTC m=+39.041504653" Dec 11 09:21:48 crc kubenswrapper[4788]: I1211 09:21:48.989705 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"782d4e40ede2ab982b791a26337f9a8dc99c3dfc571e1ee864441c7417679d45"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.002194 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.003072 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.503019554 +0000 UTC m=+39.573799140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.003410 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.004008 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.503983298 +0000 UTC m=+39.574762884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.009332 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" event={"ID":"34a801ac-98e6-4a11-b743-ed43c228ce05","Type":"ContainerStarted","Data":"2e42155f4d876e12c809ae6077b4f9d919e3eaf76a4169e34121a462b98e2357"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.020775 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" event={"ID":"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d","Type":"ContainerStarted","Data":"2a7630e78f4bc2f97ca6048d582f7616b0c3471774e25fe8dd049b9dc9f82c1c"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.040939 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" event={"ID":"37cb9609-c336-4c6b-86d5-1d680695f08e","Type":"ContainerStarted","Data":"2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.041531 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.049727 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" event={"ID":"6fc63325-1134-4106-aa40-35d3ec59fc5a","Type":"ContainerStarted","Data":"5ebd5900b920dd8123cb2d07c42b322bd044152eec879a42b38dbe285a85ff13"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.052846 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" event={"ID":"b3670533-6639-4b4f-88f4-15ebb2f2b8be","Type":"ContainerStarted","Data":"56ee07744451383a027f80e8387709c81d1daff13b0daa42c66ebddfa3c74e8d"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.052892 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" event={"ID":"b3670533-6639-4b4f-88f4-15ebb2f2b8be","Type":"ContainerStarted","Data":"d2efe418c1121cd61212f77e9b614d75539de355781209c1efc094481187494e"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.059140 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" event={"ID":"03fbacdb-e497-4e45-af5d-04099435b951","Type":"ContainerStarted","Data":"4b251968d4859d356e06ffa835b276634f2ed162f7efe25b06a70e38fede7e4d"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.065387 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" event={"ID":"b9fcc600-c510-4fc7-a539-02ff3f235ee4","Type":"ContainerStarted","Data":"47ed6acbba554fdcd090a5fcb622c657b2fbd3c6cbb310bd0bd1f2ad0ff3a326"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.083259 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" event={"ID":"d393854b-5612-4686-8a9f-c6820b3eee5f","Type":"ContainerStarted","Data":"4e8e0c498afd36c78781bcc3dad54c534204897706ba0bf5c19e8aa7c01f0827"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.100051 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podStartSLOduration=8.099901175 podStartE2EDuration="8.099901175s" podCreationTimestamp="2025-12-11 09:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.098649423 +0000 UTC m=+39.169429009" watchObservedRunningTime="2025-12-11 09:21:49.099901175 +0000 UTC m=+39.170680771" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.101057 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kr56t" podStartSLOduration=19.101045173 podStartE2EDuration="19.101045173s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.0748932 +0000 UTC m=+39.145672786" watchObservedRunningTime="2025-12-11 09:21:49.101045173 +0000 UTC m=+39.171824769" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.104731 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.106336 
4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.606314475 +0000 UTC m=+39.677094061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.120941 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dstfn" event={"ID":"dcfe9e49-8116-4af5-84db-b958e5d3104b","Type":"ContainerStarted","Data":"94ea41769e2eb0ac4421ad72e5878e20e38e5c45fe7c42d2151bec269095c39c"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.121928 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.135981 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.136054 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.154451 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" event={"ID":"903b0a02-6ac7-420c-9ba5-3d9562d57168","Type":"ContainerStarted","Data":"db2a2aae96be481c0616fde589765b5f9ba09dc51d52d36c4304ddb79f272d3f"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.154500 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" event={"ID":"903b0a02-6ac7-420c-9ba5-3d9562d57168","Type":"ContainerStarted","Data":"2e7bc729b8ac65835460ec7e60c46041531a5a58a66387fadcc57f645557274f"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.155322 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.157472 4788 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-9zzr5 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.157534 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" podUID="903b0a02-6ac7-420c-9ba5-3d9562d57168" containerName="catalog-operator" probeResult="failure" 
output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.167215 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" event={"ID":"09f0b73c-84d0-4165-8bef-45d954fcad4c","Type":"ContainerStarted","Data":"bd6df28088e4fccec126caa3cbb372a5a6cc34306ed51671cf79a2e9f9db2cb9"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.168341 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dstfn" podStartSLOduration=19.168315814 podStartE2EDuration="19.168315814s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.165939705 +0000 UTC m=+39.236719291" watchObservedRunningTime="2025-12-11 09:21:49.168315814 +0000 UTC m=+39.239095400" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.208817 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.209308 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.709288638 +0000 UTC m=+39.780068224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.214609 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7kz7q" event={"ID":"7ce8b184-b7bf-4e9e-96e5-b8a116493df7","Type":"ContainerStarted","Data":"09711d0486f053f9e96501fe3ad633a89ec204ec10509c0ed7afc5fd5b0f0e50"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.217599 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" podStartSLOduration=19.217569895 podStartE2EDuration="19.217569895s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.215067642 +0000 UTC m=+39.285847228" watchObservedRunningTime="2025-12-11 09:21:49.217569895 +0000 UTC m=+39.288349481" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.217975 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" event={"ID":"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd","Type":"ContainerStarted","Data":"7f92b0b277af6e311a1ad8251ec05359007755c289cc3d9ec2c1d1c18c93b35a"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.218005 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" event={"ID":"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd","Type":"ContainerStarted","Data":"9e66dc374348f3c7f47c7f7859d0207cfc7153f36eddc93143cedb54f2f4791c"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.224101 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" event={"ID":"01badcd8-dec7-4e94-8016-1ccc6a06a7cc","Type":"ContainerStarted","Data":"7ebb8e8593fb0566938322eb2d9c8c5ff2844a51bc3d00c6ea8cfacbb8cff182"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.254075 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.255503 4788 generic.go:334] "Generic (PLEG): container finished" podID="7e2cf7af-6694-45e9-8d9d-39fce413ba67" containerID="75deb46050e8b145e87c068ba57c12f37a484df49c1cd7217559283ea251f07f" exitCode=0 Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.255630 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" event={"ID":"7e2cf7af-6694-45e9-8d9d-39fce413ba67","Type":"ContainerDied","Data":"75deb46050e8b145e87c068ba57c12f37a484df49c1cd7217559283ea251f07f"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.265162 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" event={"ID":"efb6f672-100b-4da6-951e-7fd6326353a6","Type":"ContainerStarted","Data":"6d3dfae0ec551f0449fce345a073c2f2eee540b542cc6910d82d1135edfe2977"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 
09:21:49.294633 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:49 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:49 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:49 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.294719 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.296134 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" event={"ID":"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a","Type":"ContainerStarted","Data":"16fefe4192e61f7cb03b683b49c2139ad0769988447728a688b40c6c01fb6720"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.311208 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.311400 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.811371219 +0000 UTC m=+39.882150805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.311536 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.313289 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.813276476 +0000 UTC m=+39.884056052 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.344986 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f291fff619a3adc9c047bcc54b7571623ccc8a1fbc27424c8fbff16a9a5a0819"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.378544 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" event={"ID":"19a593cb-a446-4977-9235-4b4036d2f2f8","Type":"ContainerStarted","Data":"78ad6a8088ac412191e07e577615dde9fabaa07bfc182b6bc6e6736eabec992a"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.406169 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" event={"ID":"926d4b75-3809-4fce-89e7-8076befa3b1b","Type":"ContainerStarted","Data":"a7f71d240bb161218a4267d5f897506fcf515754b58a96bc1ac0d2d16a4b5470"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.428279 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.428494 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.429269 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:49.929218523 +0000 UTC m=+39.999998109 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.443886 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3cf802ecbc3b097ab79745281da37c7cbfb9b2bec8ed15ffba32a4a8a2b7c518"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.470646 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d8564117-b311-48b1-810b-5c95106cf868-metrics-certs\") pod \"network-metrics-daemon-ddsq9\" (UID: \"d8564117-b311-48b1-810b-5c95106cf868\") " pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.525715 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" event={"ID":"8549231d-3a38-4403-aa63-e8f9090db6ef","Type":"ContainerStarted","Data":"fc14835aa7f67d3f31c92639cdba5d3f49fc3aa2261396ff31d173e7a755bbb3"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.529963 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.530655 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.030632347 +0000 UTC m=+40.101411934 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.539910 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" event={"ID":"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce","Type":"ContainerStarted","Data":"95612dd35cfbd1cdda3a698d6f13b68e94293bcb04d05cd05cdc2fccf13288d0"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.556556 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" event={"ID":"3e7837c9-fcd6-4c09-bc3c-f361ce835fce","Type":"ContainerStarted","Data":"a215b61e1071a6c8176bf8694de0a4191ee4b0b0fa7d581fc27f5ee4a750c3c8"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.572619 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" event={"ID":"d9dc8716-98a0-4154-b72b-24fac0d3ebf1","Type":"ContainerStarted","Data":"c0a0d7c178088f2885e1ac181f2eeb8fe9aab3f5523b633aba97e33461300d15"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.578825 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" event={"ID":"8f523f4a-d652-40cc-9613-5058118a5c79","Type":"ContainerStarted","Data":"4923ac611c38ebcd9d2189d87dc425954d7ec7a78f7033d2f3a328f7e7c9276a"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.605769 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-ddsq9" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.606990 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9n777" podStartSLOduration=19.606965355 podStartE2EDuration="19.606965355s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.442667559 +0000 UTC m=+39.513447145" watchObservedRunningTime="2025-12-11 09:21:49.606965355 +0000 UTC m=+39.677744941" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.631911 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.632540 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.132517613 +0000 UTC m=+40.203297209 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.661244 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vqzqt" podStartSLOduration=19.66120311 podStartE2EDuration="19.66120311s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.612516634 +0000 UTC m=+39.683296220" watchObservedRunningTime="2025-12-11 09:21:49.66120311 +0000 UTC m=+39.731982696" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.662543 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-cwpbz" podStartSLOduration=19.662536233 podStartE2EDuration="19.662536233s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.661794145 +0000 UTC m=+39.732573731" watchObservedRunningTime="2025-12-11 09:21:49.662536233 +0000 UTC m=+39.733315819" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.683219 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rkcgk" event={"ID":"4fb89c3c-e4d1-4365-bff9-d2d010030605","Type":"ContainerStarted","Data":"079ebe58ad00d8f4bcd7c42285baa3e8065c9d5c95621d8a5a4f8aceded83c52"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.698322 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" event={"ID":"e3f83673-a3d7-4c35-9935-4a0a505732ed","Type":"ContainerStarted","Data":"e972749ab2d53a833b5b62449b4e48b6e49975f8f961194530f85ce2b58d015a"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.698385 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" event={"ID":"e3f83673-a3d7-4c35-9935-4a0a505732ed","Type":"ContainerStarted","Data":"d21c2ebc88bea035a2e012a7b8bf418d9e171b0999868c10116e852ad149f296"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.728766 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-khc6m" event={"ID":"c35c93ec-8f41-4ded-b3ba-d2533e13de17","Type":"ContainerStarted","Data":"a8f1cd0bcdff3eaaccf254eb2c9ca7c2deb786bb5a5e4b67af64305b8e8e7f4d"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.734876 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 
09:21:49.736639 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.236619985 +0000 UTC m=+40.307399581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.763459 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fvpdj" podStartSLOduration=19.763431005 podStartE2EDuration="19.763431005s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.760111742 +0000 UTC m=+39.830891338" watchObservedRunningTime="2025-12-11 09:21:49.763431005 +0000 UTC m=+39.834210591" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.787296 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" event={"ID":"6393ba4f-7df4-46a1-9156-42002708133c","Type":"ContainerStarted","Data":"b2c5e942e38161cdb37490ed9a4954888ee2e9786da972524ce6640a0b672e92"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.787541 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" event={"ID":"6393ba4f-7df4-46a1-9156-42002708133c","Type":"ContainerStarted","Data":"6d742d428bde32ef28d7c08b111160a2693de3c6e9d9073553b5e18e7506d857"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.830705 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" event={"ID":"8b8e3455-a9d8-480a-b829-665bb41d5bb9","Type":"ContainerStarted","Data":"2f1f2b7d51f9499361588aba3d6736748449b68f79c9ece2fb899aa1f1eeb56c"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.830766 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.830803 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" event={"ID":"8b8e3455-a9d8-480a-b829-665bb41d5bb9","Type":"ContainerStarted","Data":"0ca59c744233487adb387be5eacfd92cc91c2ff76c443d8c5606b9f01aaa13c7"} Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.835744 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.836962 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ingress-canary/ingress-canary-khc6m" podStartSLOduration=8.836927911 podStartE2EDuration="8.836927911s" podCreationTimestamp="2025-12-11 09:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.815877395 +0000 UTC m=+39.886657001" watchObservedRunningTime="2025-12-11 09:21:49.836927911 +0000 UTC m=+39.907707497" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.849199 4788 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-k6hvj container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.849581 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" podUID="8b8e3455-a9d8-480a-b829-665bb41d5bb9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.851741 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.351710321 +0000 UTC m=+40.422489907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.854982 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xm9p5" podStartSLOduration=20.854955552 podStartE2EDuration="20.854955552s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.854052209 +0000 UTC m=+39.924831795" watchObservedRunningTime="2025-12-11 09:21:49.854955552 +0000 UTC m=+39.925735138" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.885881 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" podStartSLOduration=19.885861194 podStartE2EDuration="19.885861194s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:49.885044944 +0000 UTC m=+39.955824530" watchObservedRunningTime="2025-12-11 09:21:49.885861194 +0000 UTC m=+39.956640780" Dec 11 09:21:49 crc kubenswrapper[4788]: I1211 09:21:49.938085 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:49 crc kubenswrapper[4788]: E1211 09:21:49.942438 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.442420357 +0000 UTC m=+40.513199943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.043267 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.043538 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.543490353 +0000 UTC m=+40.614269949 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.043630 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.044087 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.544080127 +0000 UTC m=+40.614859713 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.153379 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.153624 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.653580964 +0000 UTC m=+40.724360560 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.164116 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.164747 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.664730432 +0000 UTC m=+40.735510018 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.266838 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.267794 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.767770017 +0000 UTC m=+40.838549603 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.293338 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:50 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:50 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:50 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.293432 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.372106 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.372549 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.872533025 +0000 UTC m=+40.943312611 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.388546 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-snc68"] Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.474082 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.474681 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:50.974655677 +0000 UTC m=+41.045435263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.573992 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7t5gz" Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.575920 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.576276 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.076264166 +0000 UTC m=+41.147043752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.609753 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-ddsq9"] Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.677190 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.677406 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.177368782 +0000 UTC m=+41.248148358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.677721 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.678097 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.17808781 +0000 UTC m=+41.248867396 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.781016 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.781960 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.281942655 +0000 UTC m=+41.352722241 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.884619 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:50 crc kubenswrapper[4788]: E1211 09:21:50.885820 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.38579102 +0000 UTC m=+41.456570606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:50 crc kubenswrapper[4788]: I1211 09:21:50.986692 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ad463a1024182ba2318d8fbaca3dc17dd105c8dd75da9c345850f659eafbd720"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.001745 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.001973 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.501932002 +0000 UTC m=+41.572711598 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.002040 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.002806 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.502795724 +0000 UTC m=+41.573575310 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.015030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" event={"ID":"6fc63325-1134-4106-aa40-35d3ec59fc5a","Type":"ContainerStarted","Data":"e84f55c13cbee4d41f9a0305140ca1921a9006713b13e80352d7f2e7c46c7e10"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.035752 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" event={"ID":"b9fcc600-c510-4fc7-a539-02ff3f235ee4","Type":"ContainerStarted","Data":"21861d5ab9327ff0363f54ccf28aa1ef7974edf65ad9363c0f8edcbec1b059c7"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.076678 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"71b45c62ec11087a94338a3141e70a03cfaca5485de310e5933752bd09459f95"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.080419 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" event={"ID":"b3670533-6639-4b4f-88f4-15ebb2f2b8be","Type":"ContainerStarted","Data":"6a035f8ea8c530a49a762d414957a96c1d7b9f7ee2b7fc90ba5dfb503e109ea8"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.107700 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.109332 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.609304765 +0000 UTC m=+41.680084361 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.122587 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" event={"ID":"d393854b-5612-4686-8a9f-c6820b3eee5f","Type":"ContainerStarted","Data":"54280e8b088bc6342f49635814f641d70ef9d12cdcb17e335b1c2eb6033016bc"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.151607 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" event={"ID":"19a593cb-a446-4977-9235-4b4036d2f2f8","Type":"ContainerStarted","Data":"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.153069 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.166864 4788 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-z97fq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.166948 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.169314 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" event={"ID":"09f0b73c-84d0-4165-8bef-45d954fcad4c","Type":"ContainerStarted","Data":"ab0d39efd413c41b121b2a4f922519e3fc50f6930e7e6d8069968cac017b7d94"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.169372 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" event={"ID":"09f0b73c-84d0-4165-8bef-45d954fcad4c","Type":"ContainerStarted","Data":"ac66cd019b2fd38c9fe9139ddaf5a0f68f52828b6730d38772b8d9967c84802f"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.170041 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.210609 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.213585 4788 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.713223592 +0000 UTC m=+41.784003178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.224573 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-khc6m" event={"ID":"c35c93ec-8f41-4ded-b3ba-d2533e13de17","Type":"ContainerStarted","Data":"3a1a998b52587c695ca3bcc5617a0e5ed3f676fe2361ee12975a01253b4e0bf3"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.260555 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"472b483facdae3beb47ec5d018113f26d1ff51fec4402cea7a0a49d520ff89c9"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.261317 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.292638 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:51 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:51 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:51 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.292695 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.298629 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" event={"ID":"8549231d-3a38-4403-aa63-e8f9090db6ef","Type":"ContainerStarted","Data":"c6c85f2c2446fc77e7dfd8b934b78080110fc017c9fb69db010835cfe6bb568d"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.299482 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.316874 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.318105 4788 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.818091162 +0000 UTC m=+41.888870748 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.323408 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" event={"ID":"2ef5dd28-6a46-4364-9c71-56ddbb25c0dd","Type":"ContainerStarted","Data":"d12ee5e011c0e101842582d0bd620214b1ef8dcb02a5550bdeba7003064fa682"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.362814 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" event={"ID":"fdaa8c11-2c38-410e-a3f8-c81bbccf25ce","Type":"ContainerStarted","Data":"6dd7f7822d38d1936ea9beb643b85cd28bd2e72d8c8355f7a08f943907b5e61a"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.391634 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" event={"ID":"03fbacdb-e497-4e45-af5d-04099435b951","Type":"ContainerStarted","Data":"daad9c9ea8f529959ac2045a5a1cd6b8a36b71b1884ad9a8448a3ad984cd8a30"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.425558 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.425829 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:51.925818924 +0000 UTC m=+41.996598510 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.429004 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fndxq" podStartSLOduration=21.428981133 podStartE2EDuration="21.428981133s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.424738497 +0000 UTC m=+41.495518083" watchObservedRunningTime="2025-12-11 09:21:51.428981133 +0000 UTC m=+41.499760719" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.433898 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" event={"ID":"926d4b75-3809-4fce-89e7-8076befa3b1b","Type":"ContainerStarted","Data":"75adb216915fc7f75d0837fe2d6bc04241d91580d8b03e49409cd3eb0d77d120"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.434909 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.453103 4788 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nmcnl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.453159 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.458416 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" event={"ID":"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d","Type":"ContainerStarted","Data":"e517087aef58e5f301cf2f199b292e2c9d1e76412639cc1cf1e35e15c4fee6a6"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.470051 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" podStartSLOduration=21.470031189 podStartE2EDuration="21.470031189s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.469986798 +0000 UTC m=+41.540766384" watchObservedRunningTime="2025-12-11 09:21:51.470031189 +0000 UTC m=+41.540810775" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.499134 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" 
event={"ID":"7e2cf7af-6694-45e9-8d9d-39fce413ba67","Type":"ContainerStarted","Data":"5fe8fe36532a384f35d51d029f8b72403490520ca1ddaa2861b1a0d4e286f257"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.504955 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xd6tv" podStartSLOduration=21.504931541 podStartE2EDuration="21.504931541s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.503829024 +0000 UTC m=+41.574608610" watchObservedRunningTime="2025-12-11 09:21:51.504931541 +0000 UTC m=+41.575711127" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.528529 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" event={"ID":"8f523f4a-d652-40cc-9613-5058118a5c79","Type":"ContainerStarted","Data":"6ed5a2a4e85bf10bbf506715bc819ef7f0258a0d3cf7bbaaa000a0f8b961519c"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.530986 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.531334 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.03129947 +0000 UTC m=+42.102079056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.531543 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.534015 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.033995918 +0000 UTC m=+42.104775694 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.548620 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fsmmk" podStartSLOduration=21.548598382 podStartE2EDuration="21.548598382s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.545738461 +0000 UTC m=+41.616518067" watchObservedRunningTime="2025-12-11 09:21:51.548598382 +0000 UTC m=+41.619377968" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.578420 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7kz7q" event={"ID":"7ce8b184-b7bf-4e9e-96e5-b8a116493df7","Type":"ContainerStarted","Data":"715f7ac846350b0625cea265387f4849bcd2282862fe0f7da38f1dbd72775e22"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.581665 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.609013 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsq9" event={"ID":"d8564117-b311-48b1-810b-5c95106cf868","Type":"ContainerStarted","Data":"a20de50ef539941475a3966fcb7e216d5781d068f9dd8d163a443c93f1343f3b"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.611296 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" event={"ID":"01badcd8-dec7-4e94-8016-1ccc6a06a7cc","Type":"ContainerStarted","Data":"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.612672 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.632597 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" event={"ID":"93f3b80a-e2ab-4f3c-87e8-806e5110cf9a","Type":"ContainerStarted","Data":"1d65bdc044aa326308a22e0488b2c9c7dd68ffd4a69d95463bc2c7ca630fe638"} Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.635960 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.636027 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.637062 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.638101 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.138088219 +0000 UTC m=+42.208867805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.659335 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9zzr5" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.663394 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k6hvj" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.739363 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.758870 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" podStartSLOduration=21.758843476 podStartE2EDuration="21.758843476s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.753577304 +0000 UTC m=+41.824356890" watchObservedRunningTime="2025-12-11 09:21:51.758843476 +0000 UTC m=+41.829623062" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.759600 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.259572984 +0000 UTC m=+42.330352750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.845937 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" podStartSLOduration=21.845896711 podStartE2EDuration="21.845896711s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.843318257 +0000 UTC m=+41.914097843" watchObservedRunningTime="2025-12-11 09:21:51.845896711 +0000 UTC m=+41.916676297" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.850405 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.850949 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.350919517 +0000 UTC m=+42.421699103 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.879929 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8ptnh" podStartSLOduration=21.879910751 podStartE2EDuration="21.879910751s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:51.87945515 +0000 UTC m=+41.950234736" watchObservedRunningTime="2025-12-11 09:21:51.879910751 +0000 UTC m=+41.950690337" Dec 11 09:21:51 crc kubenswrapper[4788]: I1211 09:21:51.952393 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:51 crc kubenswrapper[4788]: E1211 09:21:51.952842 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.452825953 +0000 UTC m=+42.523605539 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.055293 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.055477 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.555452648 +0000 UTC m=+42.626232234 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.055697 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.056099 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.556091223 +0000 UTC m=+42.626870809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.145983 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-tlz6t" podStartSLOduration=22.145965019 podStartE2EDuration="22.145965019s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.014659938 +0000 UTC m=+42.085439544" watchObservedRunningTime="2025-12-11 09:21:52.145965019 +0000 UTC m=+42.216744605" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.148016 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.149176 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.157585 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.158173 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.658150554 +0000 UTC m=+42.728930140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.175836 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.189877 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.260962 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnvv8\" (UniqueName: \"kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.261013 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.261087 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.261106 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.261408 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.761395034 +0000 UTC m=+42.832174620 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.288543 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" podStartSLOduration=22.2885172 podStartE2EDuration="22.2885172s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.228675066 +0000 UTC m=+42.299454652" watchObservedRunningTime="2025-12-11 09:21:52.2885172 +0000 UTC m=+42.359296786" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.303123 4788 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-nqszp container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.303177 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" podUID="8549231d-3a38-4403-aa63-e8f9090db6ef" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.305507 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:52 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:52 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:52 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.305564 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.329790 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" podStartSLOduration=22.329770141 podStartE2EDuration="22.329770141s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.289452554 +0000 UTC m=+42.360232140" watchObservedRunningTime="2025-12-11 09:21:52.329770141 +0000 UTC m=+42.400549727" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.371719 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.372318 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.372515 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.872487349 +0000 UTC m=+42.943266945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.372927 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.373479 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.373562 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.373786 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnvv8\" (UniqueName: \"kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.375124 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:52.875109274 +0000 UTC m=+42.945888860 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.375868 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.434707 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.434777 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.447078 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-wpxdt" podStartSLOduration=22.447059152 podStartE2EDuration="22.447059152s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.332820147 +0000 UTC m=+42.403599743" watchObservedRunningTime="2025-12-11 09:21:52.447059152 +0000 UTC m=+42.517838738" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.455387 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" podStartSLOduration=22.45536667 podStartE2EDuration="22.45536667s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.445837141 +0000 UTC m=+42.516616727" watchObservedRunningTime="2025-12-11 09:21:52.45536667 +0000 UTC m=+42.526146256" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.459303 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.467104 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.477853 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.478156 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 09:21:52.978138189 +0000 UTC m=+43.048917775 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.482656 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.486256 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnvv8\" (UniqueName: \"kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8\") pod \"community-operators-wz9mc\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.504588 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.540879 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.581334 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.581398 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.581422 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jrln\" (UniqueName: \"kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.581474 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.581818 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-11 09:21:53.081802499 +0000 UTC m=+43.152582145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.630084 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" podStartSLOduration=23.630070455 podStartE2EDuration="23.630070455s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.629642644 +0000 UTC m=+42.700422220" watchObservedRunningTime="2025-12-11 09:21:52.630070455 +0000 UTC m=+42.700850041" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.684769 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.685363 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.685487 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jrln\" (UniqueName: \"kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.685691 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.686308 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.686534 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.186510735 +0000 UTC m=+43.257290321 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.686973 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.748638 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c7fld" event={"ID":"8f523f4a-d652-40cc-9613-5058118a5c79","Type":"ContainerStarted","Data":"13b7fa0ee3d2fcfc4f2f259b33dbee10e8be197b058a2ae8ab47b0ea47d10a87"} Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.754654 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.755959 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.759926 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.787817 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.789714 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.28875902 +0000 UTC m=+43.359538606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.790463 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.800489 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" event={"ID":"b9fcc600-c510-4fc7-a539-02ff3f235ee4","Type":"ContainerStarted","Data":"525086c33e906a09481036fdc21acad807ca6dab44493c7175a6bb98b9be230e"} Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.822605 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jrln\" (UniqueName: \"kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln\") pod \"community-operators-tcfwf\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.838945 4788 patch_prober.go:28] interesting pod/apiserver-76f77b778f-mg2kx container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]log ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]etcd ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/generic-apiserver-start-informers ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/max-in-flight-filter ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 11 09:21:52 crc kubenswrapper[4788]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 11 09:21:52 crc kubenswrapper[4788]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/project.openshift.io-projectcache ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-startinformers ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 11 09:21:52 crc kubenswrapper[4788]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 11 09:21:52 crc kubenswrapper[4788]: livez check failed Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.839001 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" podUID="93f3b80a-e2ab-4f3c-87e8-806e5110cf9a" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.858190 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:21:52 crc 
kubenswrapper[4788]: I1211 09:21:52.859080 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.862087 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsq9" event={"ID":"d8564117-b311-48b1-810b-5c95106cf868","Type":"ContainerStarted","Data":"bb305ab93b0af51e324698e501209d52f2e7fdf17dd79a5205b865126bd0f547"} Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.863758 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.863812 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.866255 4788 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nmcnl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.866326 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.866482 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" gracePeriod=30 Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.882590 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.921731 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.922117 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.422094972 +0000 UTC m=+43.492874568 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.922207 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.922288 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.922341 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xqgj\" (UniqueName: \"kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.922389 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:52 crc kubenswrapper[4788]: E1211 09:21:52.922705 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.422694747 +0000 UTC m=+43.493474333 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.938346 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.959301 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7kz7q" podStartSLOduration=11.959278011 podStartE2EDuration="11.959278011s" podCreationTimestamp="2025-12-11 09:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:52.957330223 +0000 UTC m=+43.028109809" watchObservedRunningTime="2025-12-11 09:21:52.959278011 +0000 UTC m=+43.030057607" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.970401 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.970488 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-nqszp" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.986309 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.986352 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.996414 4788 patch_prober.go:28] interesting pod/console-f9d7485db-jdvlj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 11 09:21:52 crc kubenswrapper[4788]: I1211 09:21:52.996517 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jdvlj" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.023973 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.060809 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv6x4\" (UniqueName: \"kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.061028 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xqgj\" (UniqueName: \"kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.061621 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.061851 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.061873 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.062086 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.072123 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.57209316 +0000 UTC m=+43.642872746 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.080103 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" podStartSLOduration=24.08008168 podStartE2EDuration="24.08008168s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:53.075093445 +0000 UTC m=+43.145873031" watchObservedRunningTime="2025-12-11 09:21:53.08008168 +0000 UTC m=+43.150861266" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.088402 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.115018 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" podStartSLOduration=24.114993722 podStartE2EDuration="24.114993722s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:53.11089017 +0000 UTC m=+43.181669756" watchObservedRunningTime="2025-12-11 09:21:53.114993722 +0000 UTC m=+43.185773308" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.142528 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xqgj\" (UniqueName: \"kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.164102 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.164166 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.164248 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv6x4\" (UniqueName: \"kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") 
" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.164307 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.164609 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.664595982 +0000 UTC m=+43.735375568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.164835 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.165508 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.216634 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv6x4\" (UniqueName: \"kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4\") pod \"certified-operators-c9gz6\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.245633 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.268153 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.268742 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.768722924 +0000 UTC m=+43.839502520 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.290210 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:53 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:53 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:53 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.290291 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.369811 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.370436 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.870408534 +0000 UTC m=+43.941188120 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.409594 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.471348 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.471859 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.971814898 +0000 UTC m=+44.042594484 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.472353 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.478104 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:53.978087825 +0000 UTC m=+44.048867401 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.479917 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities\") pod \"certified-operators-9q655\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.558766 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.573840 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.574126 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.074096174 +0000 UTC m=+44.144875760 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.574207 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.574604 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.074590657 +0000 UTC m=+44.145370243 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: W1211 09:21:53.577625 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb627dc3d_e993_47f0_94af_4ee2f832e7a5.slice/crio-104e556215bcd2b710977dabd6b347096d21ab708d0351f4cf5d177bc12d4fec WatchSource:0}: Error finding container 104e556215bcd2b710977dabd6b347096d21ab708d0351f4cf5d177bc12d4fec: Status 404 returned error can't find the container with id 104e556215bcd2b710977dabd6b347096d21ab708d0351f4cf5d177bc12d4fec Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.656682 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.677441 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.677881 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.177860897 +0000 UTC m=+44.248640493 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: W1211 09:21:53.685917 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38884c7e_079f_4133_99b7_a8232008072d.slice/crio-a567c53648d4c79a67ebfa9cef8423a730e52305763b781365cbc5303184856f WatchSource:0}: Error finding container a567c53648d4c79a67ebfa9cef8423a730e52305763b781365cbc5303184856f: Status 404 returned error can't find the container with id a567c53648d4c79a67ebfa9cef8423a730e52305763b781365cbc5303184856f Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.723864 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.769334 4788 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.782563 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.783072 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.283054556 +0000 UTC m=+44.353834142 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.880090 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-ddsq9" event={"ID":"d8564117-b311-48b1-810b-5c95106cf868","Type":"ContainerStarted","Data":"1c7f1e9c4882f92867181e0ad4696bf639fb288e13efe6ccd7956de96005963f"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.892733 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.893325 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.393299161 +0000 UTC m=+44.464078747 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.896978 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerStarted","Data":"33459cdbb3aeb0d58e5dec01f55ac4d9b6e54d39de1b3c3b6bb3c109c2c7cf40"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.897055 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerStarted","Data":"9cf8def9be9dd85a7f840953308471efdcda89482b72fa1df655bd3039e9c9e3"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.910423 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-ddsq9" podStartSLOduration=24.910383317 podStartE2EDuration="24.910383317s" podCreationTimestamp="2025-12-11 09:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:53.907633979 +0000 UTC m=+43.978413565" watchObservedRunningTime="2025-12-11 09:21:53.910383317 +0000 UTC m=+43.981162903" Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.921524 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" 
event={"ID":"b9fcc600-c510-4fc7-a539-02ff3f235ee4","Type":"ContainerStarted","Data":"658c9e239e0147adca1ec9d3e46a7ad3e66ce64f2c3e1733e006b5126641c03a"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.937538 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerStarted","Data":"a567c53648d4c79a67ebfa9cef8423a730e52305763b781365cbc5303184856f"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.963826 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerStarted","Data":"104e556215bcd2b710977dabd6b347096d21ab708d0351f4cf5d177bc12d4fec"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.994186 4788 generic.go:334] "Generic (PLEG): container finished" podID="b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" containerID="e517087aef58e5f301cf2f199b292e2c9d1e76412639cc1cf1e35e15c4fee6a6" exitCode=0 Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.995033 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" event={"ID":"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d","Type":"ContainerDied","Data":"e517087aef58e5f301cf2f199b292e2c9d1e76412639cc1cf1e35e15c4fee6a6"} Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.995911 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:53 crc kubenswrapper[4788]: E1211 09:21:53.997422 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.497411142 +0000 UTC m=+44.568190728 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.998172 4788 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nmcnl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body= Dec 11 09:21:53 crc kubenswrapper[4788]: I1211 09:21:53.998312 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.097043 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.098946 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.598930999 +0000 UTC m=+44.669710585 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.201211 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.201798 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.701780409 +0000 UTC m=+44.772559995 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.250697 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.252636 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.257553 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.267315 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.282356 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.304493 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:54 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:54 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:54 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.305067 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.305187 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.305446 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.805420899 +0000 UTC m=+44.876200485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.305589 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.305992 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.805983823 +0000 UTC m=+44.876763409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.404207 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.407291 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.407817 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.907779326 +0000 UTC m=+44.978558912 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.407992 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.408059 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfc4n\" (UniqueName: \"kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.408101 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.408194 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.408724 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:54.908704909 +0000 UTC m=+44.979484505 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: W1211 09:21:54.430164 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda35c01a8_01f0_48f9_a529_33ccc58161c9.slice/crio-a2c48857238e7ec2c40287025afe20570c51109df3b04d97e88e0f25122944cd WatchSource:0}: Error finding container a2c48857238e7ec2c40287025afe20570c51109df3b04d97e88e0f25122944cd: Status 404 returned error can't find the container with id a2c48857238e7ec2c40287025afe20570c51109df3b04d97e88e0f25122944cd Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.509216 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.509423 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:55.009381235 +0000 UTC m=+45.080160821 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.509486 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.509739 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.509785 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfc4n\" (UniqueName: \"kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.509812 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.510022 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.510289 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.510347 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 09:21:55.010334289 +0000 UTC m=+45.081114085 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ll24x" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.561923 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfc4n\" (UniqueName: \"kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n\") pod \"redhat-marketplace-4cclk\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.604846 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.606021 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.614963 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: E1211 09:21:54.615503 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 09:21:55.115480246 +0000 UTC m=+45.186259832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.619142 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.620281 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.640755 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.650749 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.651182 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.668391 4788 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-11T09:21:53.769763674Z","Handler":null,"Name":""} Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.700621 4788 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.700675 4788 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.725538 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.725598 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh8l8\" (UniqueName: \"kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.725639 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.725675 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.786729 4788 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.786803 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.792322 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.792382 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.792730 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.792746 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.826464 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.826515 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.826599 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh8l8\" (UniqueName: \"kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.827962 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " 
pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.828378 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.857131 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh8l8\" (UniqueName: \"kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8\") pod \"redhat-marketplace-mdf99\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.890402 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ll24x\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.929166 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.935347 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:21:54 crc kubenswrapper[4788]: I1211 09:21:54.937554 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:21:55 crc kubenswrapper[4788]: E1211 09:21:55.043901 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.044667 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:55 crc kubenswrapper[4788]: E1211 09:21:55.063462 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.066166 4788 generic.go:334] "Generic (PLEG): container finished" podID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerID="0b596b05f7147bdc84e0e637b9f8c8b669ba7cec060b7285744abbe5a030bb5b" exitCode=0 Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.066238 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerDied","Data":"0b596b05f7147bdc84e0e637b9f8c8b669ba7cec060b7285744abbe5a030bb5b"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.068432 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:21:55 crc kubenswrapper[4788]: E1211 09:21:55.072178 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:21:55 crc kubenswrapper[4788]: E1211 09:21:55.072255 4788 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.080610 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" event={"ID":"b9fcc600-c510-4fc7-a539-02ff3f235ee4","Type":"ContainerStarted","Data":"7a42db4005077d02375610ce250df416d8a3bf9046c0c31e8f96e6754ea0068c"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.086415 4788 generic.go:334] "Generic (PLEG): container finished" podID="38884c7e-079f-4133-99b7-a8232008072d" containerID="25ed7d97cef742272a9ba95e4e388bfae4ad067be1512a3f476313c4288ff84b" exitCode=0 Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.086474 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerDied","Data":"25ed7d97cef742272a9ba95e4e388bfae4ad067be1512a3f476313c4288ff84b"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.088065 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.095102 4788 generic.go:334] "Generic (PLEG): container finished" podID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerID="51bb90b9cc045c0f904731c7f899b13a86956a65b6313888546d4b8af06db734" exitCode=0 Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.095183 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerDied","Data":"51bb90b9cc045c0f904731c7f899b13a86956a65b6313888546d4b8af06db734"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.095216 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerStarted","Data":"a2c48857238e7ec2c40287025afe20570c51109df3b04d97e88e0f25122944cd"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.137190 4788 generic.go:334] "Generic (PLEG): container finished" podID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerID="33459cdbb3aeb0d58e5dec01f55ac4d9b6e54d39de1b3c3b6bb3c109c2c7cf40" exitCode=0 Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.137811 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerDied","Data":"33459cdbb3aeb0d58e5dec01f55ac4d9b6e54d39de1b3c3b6bb3c109c2c7cf40"} Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.159979 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rt2ms" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.280690 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-2s9b9" podStartSLOduration=14.280663408 podStartE2EDuration="14.280663408s" podCreationTimestamp="2025-12-11 09:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:55.213572671 +0000 UTC m=+45.284352257" watchObservedRunningTime="2025-12-11 09:21:55.280663408 +0000 UTC m=+45.351442994" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.289529 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:55 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:55 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:55 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.289580 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.316692 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.317654 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.326334 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.326743 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.351359 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.451019 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.451085 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.552386 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.552450 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.552967 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.581365 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.596605 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.598656 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.608870 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.610997 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.640711 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.666431 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.755404 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.755467 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.755508 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m66d7\" (UniqueName: \"kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.786502 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:21:55 crc kubenswrapper[4788]: W1211 09:21:55.830773 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cc339d9_17c7_4461_bc56_a6c12f422aa9.slice/crio-cf3b0d2ef8643957ff567fb2bf38d193e36f83eebff7af06d20a82d0ec758592 WatchSource:0}: Error finding container cf3b0d2ef8643957ff567fb2bf38d193e36f83eebff7af06d20a82d0ec758592: Status 404 returned error can't find the container with id cf3b0d2ef8643957ff567fb2bf38d193e36f83eebff7af06d20a82d0ec758592 Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.831519 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.834025 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:21:55 crc kubenswrapper[4788]: W1211 09:21:55.851615 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bede3eb_f7c3_40df_84a6_2c34e3834acd.slice/crio-8740f60773bcbc6fee4ec948cefe0f16e1d6b38a617c6ce65a64bbd0ae690a6b WatchSource:0}: Error finding container 8740f60773bcbc6fee4ec948cefe0f16e1d6b38a617c6ce65a64bbd0ae690a6b: Status 404 returned error can't find the container with id 8740f60773bcbc6fee4ec948cefe0f16e1d6b38a617c6ce65a64bbd0ae690a6b Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.857636 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.857702 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.857750 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m66d7\" (UniqueName: \"kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.858443 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.858683 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.906595 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m66d7\" (UniqueName: \"kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7\") pod \"redhat-operators-j9fxc\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.919723 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 09:21:55 crc kubenswrapper[4788]: E1211 09:21:55.919959 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" containerName="collect-profiles" Dec 11 09:21:55 crc 
kubenswrapper[4788]: I1211 09:21:55.919974 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" containerName="collect-profiles" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.920069 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" containerName="collect-profiles" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.926583 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.929564 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.932404 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.933345 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.958816 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcwqj\" (UniqueName: \"kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj\") pod \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.958889 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume\") pod \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.959027 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume\") pod \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\" (UID: \"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d\") " Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.960741 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume" (OuterVolumeSpecName: "config-volume") pod "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" (UID: "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.964250 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:21:55 crc kubenswrapper[4788]: I1211 09:21:55.967977 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" (UID: "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.007159 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj" (OuterVolumeSpecName: "kube-api-access-pcwqj") pod "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" (UID: "b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d"). InnerVolumeSpecName "kube-api-access-pcwqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.028725 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.029880 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.056832 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.063777 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.063860 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.063942 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.063962 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.063976 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcwqj\" (UniqueName: \"kubernetes.io/projected/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d-kube-api-access-pcwqj\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.165353 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.165438 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.165472 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.165520 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.165595 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvpcj\" (UniqueName: \"kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.166219 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.190177 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerStarted","Data":"cf3b0d2ef8643957ff567fb2bf38d193e36f83eebff7af06d20a82d0ec758592"} Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.205037 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerStarted","Data":"10cdf08fc160444684e3a3970bf76c221775628c6a9cde86eb996723ca2ead7a"} Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.205103 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerStarted","Data":"5d3aa19a3bdf93e8f0f1f3ad1f4303730f82db6225750423bc95187d27a57ef0"} Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.214051 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.237547 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" event={"ID":"b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d","Type":"ContainerDied","Data":"2a7630e78f4bc2f97ca6048d582f7616b0c3471774e25fe8dd049b9dc9f82c1c"} Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.237591 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a7630e78f4bc2f97ca6048d582f7616b0c3471774e25fe8dd049b9dc9f82c1c" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.237662 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.239952 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.245138 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" event={"ID":"9bede3eb-f7c3-40df-84a6-2c34e3834acd","Type":"ContainerStarted","Data":"8740f60773bcbc6fee4ec948cefe0f16e1d6b38a617c6ce65a64bbd0ae690a6b"} Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.266832 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.266912 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.266991 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvpcj\" (UniqueName: \"kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.268109 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.271778 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.321182 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:56 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:56 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:56 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.321746 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.330809 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.337118 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvpcj\" (UniqueName: \"kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj\") pod \"redhat-operators-6zfxb\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.447972 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.567250 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 11 09:21:56 crc kubenswrapper[4788]: I1211 09:21:56.765685 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.141188 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7kz7q" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.279189 4788 generic.go:334] "Generic (PLEG): container finished" podID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerID="10cdf08fc160444684e3a3970bf76c221775628c6a9cde86eb996723ca2ead7a" exitCode=0 Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.279482 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerDied","Data":"10cdf08fc160444684e3a3970bf76c221775628c6a9cde86eb996723ca2ead7a"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.282774 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" event={"ID":"9bede3eb-f7c3-40df-84a6-2c34e3834acd","Type":"ContainerStarted","Data":"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.282950 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.289947 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:57 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:57 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:57 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.290077 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.291917 4788 generic.go:334] "Generic (PLEG): container finished" podID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerID="d6afd0068dfa2c1354939d6b2b0ccbde32056e0edde1eb02d2a9b15d21503ba1" exitCode=0 Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.292047 4788 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerDied","Data":"d6afd0068dfa2c1354939d6b2b0ccbde32056e0edde1eb02d2a9b15d21503ba1"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.310670 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" podStartSLOduration=27.310655922 podStartE2EDuration="27.310655922s" podCreationTimestamp="2025-12-11 09:21:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:57.310350944 +0000 UTC m=+47.381130530" watchObservedRunningTime="2025-12-11 09:21:57.310655922 +0000 UTC m=+47.381435508" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.316963 4788 generic.go:334] "Generic (PLEG): container finished" podID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerID="9380ab1899f9c190a6b634832a5ce6b1bfcfec5f0a4809acae8f9ae54d438bff" exitCode=0 Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.317236 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerDied","Data":"9380ab1899f9c190a6b634832a5ce6b1bfcfec5f0a4809acae8f9ae54d438bff"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.317278 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerStarted","Data":"fd073c117cc021ec37b96350d1b4421533cb41502ef6b19e79f076e7ee07afd6"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.321557 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8949ae02-daa2-4d9a-be86-341d33541b7c","Type":"ContainerStarted","Data":"04f6535623bd0b4f6beacc4e681e40158569115f4b005411e595a0d5b22360da"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.321632 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8949ae02-daa2-4d9a-be86-341d33541b7c","Type":"ContainerStarted","Data":"84f19bbb46021b18ba09a997d6120d319d892eb6289fdb6b1643f1cd75148ebc"} Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.367474 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.367440551 podStartE2EDuration="2.367440551s" podCreationTimestamp="2025-12-11 09:21:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:57.364809105 +0000 UTC m=+47.435588701" watchObservedRunningTime="2025-12-11 09:21:57.367440551 +0000 UTC m=+47.438220137" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.472599 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.487215 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 09:21:57.494735 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-mg2kx" Dec 11 09:21:57 crc kubenswrapper[4788]: I1211 
09:21:57.546521 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.287431 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:58 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:58 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:58 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.287789 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.358900 4788 generic.go:334] "Generic (PLEG): container finished" podID="8949ae02-daa2-4d9a-be86-341d33541b7c" containerID="04f6535623bd0b4f6beacc4e681e40158569115f4b005411e595a0d5b22360da" exitCode=0 Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.358978 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8949ae02-daa2-4d9a-be86-341d33541b7c","Type":"ContainerDied","Data":"04f6535623bd0b4f6beacc4e681e40158569115f4b005411e595a0d5b22360da"} Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.364058 4788 generic.go:334] "Generic (PLEG): container finished" podID="58eec917-a769-40a8-b458-6c7b189dcb19" containerID="1e8fb418bb37cda0de9a1f299bc70015cfb888c484c2ed8be1796aeb86a4a992" exitCode=0 Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.364453 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerDied","Data":"1e8fb418bb37cda0de9a1f299bc70015cfb888c484c2ed8be1796aeb86a4a992"} Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.364518 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerStarted","Data":"e2ed8e88b3db183136c0fa5d0315262d5a33f6ae35b348ebd2478cfa1bee785c"} Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.377095 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"089fa2cc-3239-491b-8481-c9d068f2b3cd","Type":"ContainerStarted","Data":"e8df06755d0761358aba4530a04d82f8dc0ead0bd1f2a1eace738c02f22eac14"} Dec 11 09:21:58 crc kubenswrapper[4788]: I1211 09:21:58.439084 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.439039668 podStartE2EDuration="3.439039668s" podCreationTimestamp="2025-12-11 09:21:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:21:58.434608347 +0000 UTC m=+48.505387933" watchObservedRunningTime="2025-12-11 09:21:58.439039668 +0000 UTC m=+48.509819254" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.287498 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure 
output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:21:59 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:21:59 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:21:59 crc kubenswrapper[4788]: healthz check failed Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.287944 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.414287 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"089fa2cc-3239-491b-8481-c9d068f2b3cd","Type":"ContainerStarted","Data":"1cf1d10d3c10fb75785eadd39c610aa68d8d2a64e163eeaee9cca0518887551a"} Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.736340 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.787867 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access\") pod \"8949ae02-daa2-4d9a-be86-341d33541b7c\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.788168 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir\") pod \"8949ae02-daa2-4d9a-be86-341d33541b7c\" (UID: \"8949ae02-daa2-4d9a-be86-341d33541b7c\") " Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.788355 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8949ae02-daa2-4d9a-be86-341d33541b7c" (UID: "8949ae02-daa2-4d9a-be86-341d33541b7c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.789145 4788 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8949ae02-daa2-4d9a-be86-341d33541b7c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.810004 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8949ae02-daa2-4d9a-be86-341d33541b7c" (UID: "8949ae02-daa2-4d9a-be86-341d33541b7c"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:21:59 crc kubenswrapper[4788]: I1211 09:21:59.890369 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8949ae02-daa2-4d9a-be86-341d33541b7c-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:00 crc kubenswrapper[4788]: I1211 09:22:00.284507 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:22:00 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:22:00 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:22:00 crc kubenswrapper[4788]: healthz check failed Dec 11 09:22:00 crc kubenswrapper[4788]: I1211 09:22:00.284633 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:22:00 crc kubenswrapper[4788]: I1211 09:22:00.440676 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8949ae02-daa2-4d9a-be86-341d33541b7c","Type":"ContainerDied","Data":"84f19bbb46021b18ba09a997d6120d319d892eb6289fdb6b1643f1cd75148ebc"} Dec 11 09:22:00 crc kubenswrapper[4788]: I1211 09:22:00.441164 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84f19bbb46021b18ba09a997d6120d319d892eb6289fdb6b1643f1cd75148ebc" Dec 11 09:22:00 crc kubenswrapper[4788]: I1211 09:22:00.440721 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 09:22:01 crc kubenswrapper[4788]: I1211 09:22:01.284705 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:22:01 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:22:01 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:22:01 crc kubenswrapper[4788]: healthz check failed Dec 11 09:22:01 crc kubenswrapper[4788]: I1211 09:22:01.284792 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:22:01 crc kubenswrapper[4788]: I1211 09:22:01.491962 4788 generic.go:334] "Generic (PLEG): container finished" podID="089fa2cc-3239-491b-8481-c9d068f2b3cd" containerID="1cf1d10d3c10fb75785eadd39c610aa68d8d2a64e163eeaee9cca0518887551a" exitCode=0 Dec 11 09:22:01 crc kubenswrapper[4788]: I1211 09:22:01.492029 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"089fa2cc-3239-491b-8481-c9d068f2b3cd","Type":"ContainerDied","Data":"1cf1d10d3c10fb75785eadd39c610aa68d8d2a64e163eeaee9cca0518887551a"} Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.260424 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.278972 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.288711 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:22:02 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:22:02 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:22:02 crc kubenswrapper[4788]: healthz check failed Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.288798 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.806738 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.845743 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=0.84572631 podStartE2EDuration="845.72631ms" podCreationTimestamp="2025-12-11 09:22:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:22:02.843839383 +0000 UTC m=+52.914618969" watchObservedRunningTime="2025-12-11 09:22:02.84572631 +0000 UTC m=+52.916505886" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.847979 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access\") pod \"089fa2cc-3239-491b-8481-c9d068f2b3cd\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.848046 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir\") pod \"089fa2cc-3239-491b-8481-c9d068f2b3cd\" (UID: \"089fa2cc-3239-491b-8481-c9d068f2b3cd\") " Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.849650 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "089fa2cc-3239-491b-8481-c9d068f2b3cd" (UID: "089fa2cc-3239-491b-8481-c9d068f2b3cd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.860614 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "089fa2cc-3239-491b-8481-c9d068f2b3cd" (UID: "089fa2cc-3239-491b-8481-c9d068f2b3cd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.950698 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/089fa2cc-3239-491b-8481-c9d068f2b3cd-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.950741 4788 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/089fa2cc-3239-491b-8481-c9d068f2b3cd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.982939 4788 patch_prober.go:28] interesting pod/console-f9d7485db-jdvlj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 11 09:22:02 crc kubenswrapper[4788]: I1211 09:22:02.982997 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jdvlj" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.102038 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.290453 4788 patch_prober.go:28] interesting pod/router-default-5444994796-6xr2p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 09:22:03 crc kubenswrapper[4788]: [-]has-synced failed: reason withheld Dec 11 09:22:03 crc kubenswrapper[4788]: [+]process-running ok Dec 11 09:22:03 crc kubenswrapper[4788]: healthz check failed Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.290547 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6xr2p" podUID="90a71b66-dd01-4895-8b06-98a8f650cabc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.519968 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.519942 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"089fa2cc-3239-491b-8481-c9d068f2b3cd","Type":"ContainerDied","Data":"e8df06755d0761358aba4530a04d82f8dc0ead0bd1f2a1eace738c02f22eac14"} Dec 11 09:22:03 crc kubenswrapper[4788]: I1211 09:22:03.520210 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8df06755d0761358aba4530a04d82f8dc0ead0bd1f2a1eace738c02f22eac14" Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.285172 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.288896 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6xr2p" Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.789794 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.789913 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.790125 4788 patch_prober.go:28] interesting pod/downloads-7954f5f757-dstfn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 09:22:04 crc kubenswrapper[4788]: I1211 09:22:04.790158 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dstfn" podUID="dcfe9e49-8116-4af5-84db-b958e5d3104b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 09:22:05 crc kubenswrapper[4788]: E1211 09:22:05.032725 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:05 crc kubenswrapper[4788]: E1211 09:22:05.034435 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:05 crc kubenswrapper[4788]: E1211 09:22:05.036344 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f 
/ready/ready"] Dec 11 09:22:05 crc kubenswrapper[4788]: E1211 09:22:05.036432 4788 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:22:12 crc kubenswrapper[4788]: I1211 09:22:12.988209 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:22:12 crc kubenswrapper[4788]: I1211 09:22:12.992394 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:22:14 crc kubenswrapper[4788]: I1211 09:22:14.808315 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dstfn" Dec 11 09:22:15 crc kubenswrapper[4788]: E1211 09:22:15.033319 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:15 crc kubenswrapper[4788]: E1211 09:22:15.034804 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:15 crc kubenswrapper[4788]: E1211 09:22:15.036750 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:15 crc kubenswrapper[4788]: E1211 09:22:15.036821 4788 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:22:15 crc kubenswrapper[4788]: I1211 09:22:15.053119 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:22:15 crc kubenswrapper[4788]: I1211 09:22:15.512453 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 11 09:22:20 crc kubenswrapper[4788]: I1211 09:22:20.516625 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=5.516593508 podStartE2EDuration="5.516593508s" podCreationTimestamp="2025-12-11 09:22:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:22:20.514163018 +0000 UTC m=+70.584942664" watchObservedRunningTime="2025-12-11 09:22:20.516593508 +0000 UTC m=+70.587373134" Dec 11 09:22:23 crc 
kubenswrapper[4788]: I1211 09:22:23.646850 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-snc68_37cb9609-c336-4c6b-86d5-1d680695f08e/kube-multus-additional-cni-plugins/0.log" Dec 11 09:22:23 crc kubenswrapper[4788]: I1211 09:22:23.647130 4788 generic.go:334] "Generic (PLEG): container finished" podID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" exitCode=137 Dec 11 09:22:23 crc kubenswrapper[4788]: I1211 09:22:23.647158 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" event={"ID":"37cb9609-c336-4c6b-86d5-1d680695f08e","Type":"ContainerDied","Data":"2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36"} Dec 11 09:22:24 crc kubenswrapper[4788]: I1211 09:22:24.996781 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-598q7" Dec 11 09:22:25 crc kubenswrapper[4788]: E1211 09:22:25.033469 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:25 crc kubenswrapper[4788]: E1211 09:22:25.035210 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:25 crc kubenswrapper[4788]: E1211 09:22:25.036140 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:25 crc kubenswrapper[4788]: E1211 09:22:25.036318 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:22:25 crc kubenswrapper[4788]: I1211 09:22:25.735078 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.110791 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 09:22:33 crc kubenswrapper[4788]: E1211 09:22:33.111897 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8949ae02-daa2-4d9a-be86-341d33541b7c" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.111910 4788 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8949ae02-daa2-4d9a-be86-341d33541b7c" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: E1211 09:22:33.111928 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="089fa2cc-3239-491b-8481-c9d068f2b3cd" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.111934 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="089fa2cc-3239-491b-8481-c9d068f2b3cd" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.112030 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="089fa2cc-3239-491b-8481-c9d068f2b3cd" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.112043 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8949ae02-daa2-4d9a-be86-341d33541b7c" containerName="pruner" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.113116 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.120357 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.122925 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.123218 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.215348 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.215423 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.317454 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.317519 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.317618 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc 
kubenswrapper[4788]: I1211 09:22:33.351567 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:33 crc kubenswrapper[4788]: I1211 09:22:33.448338 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:22:34 crc kubenswrapper[4788]: I1211 09:22:34.519038 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 11 09:22:35 crc kubenswrapper[4788]: E1211 09:22:35.032182 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:35 crc kubenswrapper[4788]: E1211 09:22:35.032844 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:35 crc kubenswrapper[4788]: E1211 09:22:35.033750 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:35 crc kubenswrapper[4788]: E1211 09:22:35.033838 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:22:38 crc kubenswrapper[4788]: I1211 09:22:38.912881 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 09:22:38 crc kubenswrapper[4788]: I1211 09:22:38.913780 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:38 crc kubenswrapper[4788]: I1211 09:22:38.927405 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 09:22:38 crc kubenswrapper[4788]: I1211 09:22:38.990632 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=4.990604128 podStartE2EDuration="4.990604128s" podCreationTimestamp="2025-12-11 09:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:22:38.954672739 +0000 UTC m=+89.025452365" watchObservedRunningTime="2025-12-11 09:22:38.990604128 +0000 UTC m=+89.061383714" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.002129 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.002419 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.002611 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.104103 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.104169 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.104249 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.104259 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.104367 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:39 crc kubenswrapper[4788]: I1211 09:22:39.896162 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access\") pod \"installer-9-crc\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:40 crc kubenswrapper[4788]: E1211 09:22:40.185218 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 09:22:40 crc kubenswrapper[4788]: E1211 09:22:40.185519 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6jrln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tcfwf_openshift-marketplace(b627dc3d-e993-47f0-94af-4ee2f832e7a5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:40 crc kubenswrapper[4788]: I1211 09:22:40.185692 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:22:40 crc kubenswrapper[4788]: E1211 09:22:40.186667 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tcfwf" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" Dec 11 09:22:45 crc kubenswrapper[4788]: E1211 09:22:45.031654 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:45 crc kubenswrapper[4788]: E1211 09:22:45.032905 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:45 crc kubenswrapper[4788]: E1211 09:22:45.033634 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 09:22:45 crc kubenswrapper[4788]: E1211 09:22:45.033721 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.319988 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.321114 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m66d7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-j9fxc_openshift-marketplace(8e697aeb-5f1a-45f0-9c6e-5b65e638342c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.322546 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-j9fxc" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.347869 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.348534 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lnvv8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wz9mc_openshift-marketplace(23f0f07c-d5d8-4a8c-8546-77e15ef979f5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:51 crc kubenswrapper[4788]: E1211 09:22:51.350496 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wz9mc" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" Dec 11 09:22:53 crc kubenswrapper[4788]: E1211 09:22:53.287618 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wz9mc" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" Dec 11 09:22:53 crc kubenswrapper[4788]: E1211 09:22:53.287691 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-j9fxc" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.347199 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-snc68_37cb9609-c336-4c6b-86d5-1d680695f08e/kube-multus-additional-cni-plugins/0.log" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.347318 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:22:53 crc kubenswrapper[4788]: E1211 09:22:53.386514 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 09:22:53 crc kubenswrapper[4788]: E1211 09:22:53.386748 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8xqgj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9q655_openshift-marketplace(a35c01a8-01f0-48f9-a529-33ccc58161c9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:53 crc kubenswrapper[4788]: E1211 09:22:53.387947 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9q655" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410263 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92gl9\" (UniqueName: \"kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9\") pod \"37cb9609-c336-4c6b-86d5-1d680695f08e\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410439 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir\") pod \"37cb9609-c336-4c6b-86d5-1d680695f08e\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410594 4788 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready\") pod \"37cb9609-c336-4c6b-86d5-1d680695f08e\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410582 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "37cb9609-c336-4c6b-86d5-1d680695f08e" (UID: "37cb9609-c336-4c6b-86d5-1d680695f08e"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410810 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist\") pod \"37cb9609-c336-4c6b-86d5-1d680695f08e\" (UID: \"37cb9609-c336-4c6b-86d5-1d680695f08e\") " Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.410977 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready" (OuterVolumeSpecName: "ready") pod "37cb9609-c336-4c6b-86d5-1d680695f08e" (UID: "37cb9609-c336-4c6b-86d5-1d680695f08e"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.411390 4788 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/37cb9609-c336-4c6b-86d5-1d680695f08e-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.411425 4788 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/37cb9609-c336-4c6b-86d5-1d680695f08e-ready\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.411770 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "37cb9609-c336-4c6b-86d5-1d680695f08e" (UID: "37cb9609-c336-4c6b-86d5-1d680695f08e"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.416806 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9" (OuterVolumeSpecName: "kube-api-access-92gl9") pod "37cb9609-c336-4c6b-86d5-1d680695f08e" (UID: "37cb9609-c336-4c6b-86d5-1d680695f08e"). InnerVolumeSpecName "kube-api-access-92gl9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.512634 4788 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/37cb9609-c336-4c6b-86d5-1d680695f08e-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.512691 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92gl9\" (UniqueName: \"kubernetes.io/projected/37cb9609-c336-4c6b-86d5-1d680695f08e-kube-api-access-92gl9\") on node \"crc\" DevicePath \"\"" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.819828 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-snc68_37cb9609-c336-4c6b-86d5-1d680695f08e/kube-multus-additional-cni-plugins/0.log" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.820934 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.821034 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-snc68" event={"ID":"37cb9609-c336-4c6b-86d5-1d680695f08e","Type":"ContainerDied","Data":"fd7698abaab57d0fa7cc0a55bcd6e2338ff970a41095f259fd745a6c63305518"} Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.821131 4788 scope.go:117] "RemoveContainer" containerID="2d07056c27a21a3af1ca7c8b3377825bd6fa0effea938cefd4da1dfb4ec2be36" Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.867726 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-snc68"] Dec 11 09:22:53 crc kubenswrapper[4788]: I1211 09:22:53.876257 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-snc68"] Dec 11 09:22:54 crc kubenswrapper[4788]: I1211 09:22:54.503720 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" path="/var/lib/kubelet/pods/37cb9609-c336-4c6b-86d5-1d680695f08e/volumes" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.572664 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9q655" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.597089 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.597464 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cvpcj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6zfxb_openshift-marketplace(58eec917-a769-40a8-b458-6c7b189dcb19): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.599020 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6zfxb" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.662881 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.663444 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lv6x4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-c9gz6_openshift-marketplace(38884c7e-079f-4133-99b7-a8232008072d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:56 crc kubenswrapper[4788]: E1211 09:22:56.665014 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-c9gz6" podUID="38884c7e-079f-4133-99b7-a8232008072d" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.143854 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6zfxb" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.144279 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-c9gz6" podUID="38884c7e-079f-4133-99b7-a8232008072d" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.223123 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.223699 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dh8l8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mdf99_openshift-marketplace(1cc339d9-17c7-4461-bc56-a6c12f422aa9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.225472 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mdf99" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.257917 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.258080 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xfc4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4cclk_openshift-marketplace(efaaaaf2-7778-46d4-9400-7c31a2f82765): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.259265 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4cclk" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.440053 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 09:22:58 crc kubenswrapper[4788]: W1211 09:22:58.448816 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8ea369d3_c8b5_4d4a_8a21_19626e4477e6.slice/crio-cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df WatchSource:0}: Error finding container cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df: Status 404 returned error can't find the container with id cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.593479 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 09:22:58 crc kubenswrapper[4788]: W1211 09:22:58.601032 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod607b429c_8cbb_48ef_bc1a_7528f271aacb.slice/crio-d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c WatchSource:0}: Error finding container d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c: Status 404 returned error can't find the container with id d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.850275 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"8ea369d3-c8b5-4d4a-8a21-19626e4477e6","Type":"ContainerStarted","Data":"4103d3e6c4aeb58b84efd383088888769b8b0006b5a770e4a6eec901c4c5915c"} Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.850933 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8ea369d3-c8b5-4d4a-8a21-19626e4477e6","Type":"ContainerStarted","Data":"cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df"} Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.851172 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"607b429c-8cbb-48ef-bc1a-7528f271aacb","Type":"ContainerStarted","Data":"d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c"} Dec 11 09:22:58 crc kubenswrapper[4788]: I1211 09:22:58.852987 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerStarted","Data":"c4b2b2747074633d6d0f551e082338fe0fbb48251c8cbe2f1fc816e773c37135"} Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.855753 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4cclk" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" Dec 11 09:22:58 crc kubenswrapper[4788]: E1211 09:22:58.856369 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mdf99" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.863192 4788 generic.go:334] "Generic (PLEG): container finished" podID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerID="c4b2b2747074633d6d0f551e082338fe0fbb48251c8cbe2f1fc816e773c37135" exitCode=0 Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.863507 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerDied","Data":"c4b2b2747074633d6d0f551e082338fe0fbb48251c8cbe2f1fc816e773c37135"} Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.865959 4788 generic.go:334] "Generic (PLEG): container finished" podID="8ea369d3-c8b5-4d4a-8a21-19626e4477e6" containerID="4103d3e6c4aeb58b84efd383088888769b8b0006b5a770e4a6eec901c4c5915c" exitCode=0 Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.866039 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8ea369d3-c8b5-4d4a-8a21-19626e4477e6","Type":"ContainerDied","Data":"4103d3e6c4aeb58b84efd383088888769b8b0006b5a770e4a6eec901c4c5915c"} Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.869395 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"607b429c-8cbb-48ef-bc1a-7528f271aacb","Type":"ContainerStarted","Data":"86d51bbb2ad4a96789d93b0d5ab39a835ba69fc8994f7b1eb9d62f3ba58d9215"} Dec 11 09:22:59 crc kubenswrapper[4788]: I1211 09:22:59.915785 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" 
podStartSLOduration=21.915762271 podStartE2EDuration="21.915762271s" podCreationTimestamp="2025-12-11 09:22:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:22:59.91570276 +0000 UTC m=+109.986482346" watchObservedRunningTime="2025-12-11 09:22:59.915762271 +0000 UTC m=+109.986541857" Dec 11 09:23:00 crc kubenswrapper[4788]: I1211 09:23:00.878203 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerStarted","Data":"803c32e8a3fcfed22daa9c67064173f17755719399bbe428eb2ff67048589aa7"} Dec 11 09:23:00 crc kubenswrapper[4788]: I1211 09:23:00.903320 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tcfwf" podStartSLOduration=3.519785335 podStartE2EDuration="1m8.903296143s" podCreationTimestamp="2025-12-11 09:21:52 +0000 UTC" firstStartedPulling="2025-12-11 09:21:55.068174138 +0000 UTC m=+45.138953724" lastFinishedPulling="2025-12-11 09:23:00.451684946 +0000 UTC m=+110.522464532" observedRunningTime="2025-12-11 09:23:00.900730649 +0000 UTC m=+110.971510235" watchObservedRunningTime="2025-12-11 09:23:00.903296143 +0000 UTC m=+110.974075729" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.145274 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.242205 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access\") pod \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.242305 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir\") pod \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\" (UID: \"8ea369d3-c8b5-4d4a-8a21-19626e4477e6\") " Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.242362 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8ea369d3-c8b5-4d4a-8a21-19626e4477e6" (UID: "8ea369d3-c8b5-4d4a-8a21-19626e4477e6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.242621 4788 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.248642 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8ea369d3-c8b5-4d4a-8a21-19626e4477e6" (UID: "8ea369d3-c8b5-4d4a-8a21-19626e4477e6"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.345885 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ea369d3-c8b5-4d4a-8a21-19626e4477e6-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.898040 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.898168 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8ea369d3-c8b5-4d4a-8a21-19626e4477e6","Type":"ContainerDied","Data":"cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df"} Dec 11 09:23:01 crc kubenswrapper[4788]: I1211 09:23:01.899261 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf0191f542919171b12955acc153b5093d8884c74acc93f5bc4d4abfb49121df" Dec 11 09:23:01 crc kubenswrapper[4788]: E1211 09:23:01.971366 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod8ea369d3_c8b5_4d4a_8a21_19626e4477e6.slice\": RecentStats: unable to find data in memory cache]" Dec 11 09:23:02 crc kubenswrapper[4788]: I1211 09:23:02.971406 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:02 crc kubenswrapper[4788]: I1211 09:23:02.971514 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:03 crc kubenswrapper[4788]: I1211 09:23:03.180726 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:04 crc kubenswrapper[4788]: I1211 09:23:04.101496 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:23:08 crc kubenswrapper[4788]: I1211 09:23:08.942454 4788 generic.go:334] "Generic (PLEG): container finished" podID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerID="4c170b741a1a8948ab6156accbddbdd747061a968e119e31883aa668384f7afe" exitCode=0 Dec 11 09:23:08 crc kubenswrapper[4788]: I1211 09:23:08.942550 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerDied","Data":"4c170b741a1a8948ab6156accbddbdd747061a968e119e31883aa668384f7afe"} Dec 11 09:23:09 crc kubenswrapper[4788]: I1211 09:23:09.953357 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerStarted","Data":"c98ad4544a4600f178c3914fb99681ff6ae5b3d641ffee4b6cfbabe422bee64e"} Dec 11 09:23:09 crc kubenswrapper[4788]: I1211 09:23:09.958656 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerStarted","Data":"25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051"} Dec 11 09:23:09 crc kubenswrapper[4788]: I1211 09:23:09.996700 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wz9mc" 
podStartSLOduration=4.809700004 podStartE2EDuration="1m18.996672391s" podCreationTimestamp="2025-12-11 09:21:51 +0000 UTC" firstStartedPulling="2025-12-11 09:21:55.218383932 +0000 UTC m=+45.289163518" lastFinishedPulling="2025-12-11 09:23:09.405356319 +0000 UTC m=+119.476135905" observedRunningTime="2025-12-11 09:23:09.993895291 +0000 UTC m=+120.064674867" watchObservedRunningTime="2025-12-11 09:23:09.996672391 +0000 UTC m=+120.067451977" Dec 11 09:23:10 crc kubenswrapper[4788]: I1211 09:23:10.966599 4788 generic.go:334] "Generic (PLEG): container finished" podID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerID="c98ad4544a4600f178c3914fb99681ff6ae5b3d641ffee4b6cfbabe422bee64e" exitCode=0 Dec 11 09:23:10 crc kubenswrapper[4788]: I1211 09:23:10.966702 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerDied","Data":"c98ad4544a4600f178c3914fb99681ff6ae5b3d641ffee4b6cfbabe422bee64e"} Dec 11 09:23:11 crc kubenswrapper[4788]: I1211 09:23:11.993784 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerStarted","Data":"d02b7bace31913625512f12ba4ad260ace16bae9cbc315c1478f2c17a69e1403"} Dec 11 09:23:12 crc kubenswrapper[4788]: I1211 09:23:12.590397 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:23:12 crc kubenswrapper[4788]: I1211 09:23:12.591117 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:23:13 crc kubenswrapper[4788]: I1211 09:23:13.022892 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:13 crc kubenswrapper[4788]: I1211 09:23:13.657186 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wz9mc" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" probeResult="failure" output=< Dec 11 09:23:13 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:13 crc kubenswrapper[4788]: > Dec 11 09:23:14 crc kubenswrapper[4788]: I1211 09:23:14.009491 4788 generic.go:334] "Generic (PLEG): container finished" podID="58eec917-a769-40a8-b458-6c7b189dcb19" containerID="d02b7bace31913625512f12ba4ad260ace16bae9cbc315c1478f2c17a69e1403" exitCode=0 Dec 11 09:23:14 crc kubenswrapper[4788]: I1211 09:23:14.009576 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerDied","Data":"d02b7bace31913625512f12ba4ad260ace16bae9cbc315c1478f2c17a69e1403"} Dec 11 09:23:15 crc kubenswrapper[4788]: I1211 09:23:15.787884 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:23:15 crc kubenswrapper[4788]: I1211 09:23:15.788123 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tcfwf" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="registry-server" containerID="cri-o://803c32e8a3fcfed22daa9c67064173f17755719399bbe428eb2ff67048589aa7" gracePeriod=2 Dec 11 09:23:20 crc kubenswrapper[4788]: I1211 09:23:20.041919 4788 generic.go:334] "Generic (PLEG): 
container finished" podID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerID="803c32e8a3fcfed22daa9c67064173f17755719399bbe428eb2ff67048589aa7" exitCode=0 Dec 11 09:23:20 crc kubenswrapper[4788]: I1211 09:23:20.042009 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerDied","Data":"803c32e8a3fcfed22daa9c67064173f17755719399bbe428eb2ff67048589aa7"} Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.176050 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.366631 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content\") pod \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.367271 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities\") pod \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.367528 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jrln\" (UniqueName: \"kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln\") pod \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\" (UID: \"b627dc3d-e993-47f0-94af-4ee2f832e7a5\") " Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.368020 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities" (OuterVolumeSpecName: "utilities") pod "b627dc3d-e993-47f0-94af-4ee2f832e7a5" (UID: "b627dc3d-e993-47f0-94af-4ee2f832e7a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.375647 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln" (OuterVolumeSpecName: "kube-api-access-6jrln") pod "b627dc3d-e993-47f0-94af-4ee2f832e7a5" (UID: "b627dc3d-e993-47f0-94af-4ee2f832e7a5"). InnerVolumeSpecName "kube-api-access-6jrln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.418954 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b627dc3d-e993-47f0-94af-4ee2f832e7a5" (UID: "b627dc3d-e993-47f0-94af-4ee2f832e7a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.469593 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jrln\" (UniqueName: \"kubernetes.io/projected/b627dc3d-e993-47f0-94af-4ee2f832e7a5-kube-api-access-6jrln\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.469645 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.469657 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b627dc3d-e993-47f0-94af-4ee2f832e7a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.549650 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:23:22 crc kubenswrapper[4788]: I1211 09:23:22.590185 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:23:23 crc kubenswrapper[4788]: I1211 09:23:23.062150 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tcfwf" event={"ID":"b627dc3d-e993-47f0-94af-4ee2f832e7a5","Type":"ContainerDied","Data":"104e556215bcd2b710977dabd6b347096d21ab708d0351f4cf5d177bc12d4fec"} Dec 11 09:23:23 crc kubenswrapper[4788]: I1211 09:23:23.062289 4788 scope.go:117] "RemoveContainer" containerID="803c32e8a3fcfed22daa9c67064173f17755719399bbe428eb2ff67048589aa7" Dec 11 09:23:23 crc kubenswrapper[4788]: I1211 09:23:23.062186 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tcfwf" Dec 11 09:23:23 crc kubenswrapper[4788]: I1211 09:23:23.089128 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:23:23 crc kubenswrapper[4788]: I1211 09:23:23.094484 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tcfwf"] Dec 11 09:23:24 crc kubenswrapper[4788]: I1211 09:23:24.503319 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" path="/var/lib/kubelet/pods/b627dc3d-e993-47f0-94af-4ee2f832e7a5/volumes" Dec 11 09:23:25 crc kubenswrapper[4788]: I1211 09:23:25.438629 4788 scope.go:117] "RemoveContainer" containerID="c4b2b2747074633d6d0f551e082338fe0fbb48251c8cbe2f1fc816e773c37135" Dec 11 09:23:28 crc kubenswrapper[4788]: I1211 09:23:28.180797 4788 scope.go:117] "RemoveContainer" containerID="0b596b05f7147bdc84e0e637b9f8c8b669ba7cec060b7285744abbe5a030bb5b" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.132192 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" podUID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" containerName="oauth-openshift" containerID="cri-o://37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80" gracePeriod=15 Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.729810 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.765898 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5d4df5b879-7trhq"] Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766145 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="extract-content" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766161 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="extract-content" Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766172 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="registry-server" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766179 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="registry-server" Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766192 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea369d3-c8b5-4d4a-8a21-19626e4477e6" containerName="pruner" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766200 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea369d3-c8b5-4d4a-8a21-19626e4477e6" containerName="pruner" Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766215 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="extract-utilities" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766222 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="extract-utilities" Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766748 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" containerName="oauth-openshift" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766757 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" containerName="oauth-openshift" Dec 11 09:23:29 crc kubenswrapper[4788]: E1211 09:23:29.766768 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766775 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766911 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b627dc3d-e993-47f0-94af-4ee2f832e7a5" containerName="registry-server" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766922 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" containerName="oauth-openshift" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766933 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea369d3-c8b5-4d4a-8a21-19626e4477e6" containerName="pruner" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.766943 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="37cb9609-c336-4c6b-86d5-1d680695f08e" containerName="kube-multus-additional-cni-plugins" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 
09:23:29.769336 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.795157 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5d4df5b879-7trhq"] Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866719 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866799 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866827 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866853 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866884 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866905 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866934 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866951 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.866970 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-9wlbm\" (UniqueName: \"kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867011 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867045 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867066 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867094 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867113 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca\") pod \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\" (UID: \"01badcd8-dec7-4e94-8016-1ccc6a06a7cc\") " Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867549 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.867749 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.868883 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.870341 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.871253 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.878803 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.879107 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.879673 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.881723 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.881847 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.885202 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.885634 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm" (OuterVolumeSpecName: "kube-api-access-9wlbm") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "kube-api-access-9wlbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.885861 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.886155 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "01badcd8-dec7-4e94-8016-1ccc6a06a7cc" (UID: "01badcd8-dec7-4e94-8016-1ccc6a06a7cc"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.968840 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.968901 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.968924 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.968955 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.968984 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-dir\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969011 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969038 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969062 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d98lm\" (UniqueName: 
\"kubernetes.io/projected/495ab0ed-d898-45ee-ab91-c707a32f7e36-kube-api-access-d98lm\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969086 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969119 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969148 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969195 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-policies\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969218 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969259 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-session\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969305 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969319 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-router-certs\") on node 
\"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969331 4788 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969347 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969360 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969372 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969385 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969397 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969410 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969422 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969434 4788 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969446 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969460 4788 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:29 crc kubenswrapper[4788]: I1211 09:23:29.969475 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wlbm\" (UniqueName: \"kubernetes.io/projected/01badcd8-dec7-4e94-8016-1ccc6a06a7cc-kube-api-access-9wlbm\") on node \"crc\" 
DevicePath \"\"" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130696 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-policies\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130774 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130802 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-session\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130884 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130913 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130929 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130956 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.130978 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-dir\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 
crc kubenswrapper[4788]: I1211 09:23:30.131011 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131035 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131051 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d98lm\" (UniqueName: \"kubernetes.io/projected/495ab0ed-d898-45ee-ab91-c707a32f7e36-kube-api-access-d98lm\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131070 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131102 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131124 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.131663 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-policies\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.138883 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.142797 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.142909 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/495ab0ed-d898-45ee-ab91-c707a32f7e36-audit-dir\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.144219 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.144767 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.147396 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.147718 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.148735 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.148975 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.153508 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" 
(UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-session\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.158157 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.168688 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/495ab0ed-d898-45ee-ab91-c707a32f7e36-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.171049 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d98lm\" (UniqueName: \"kubernetes.io/projected/495ab0ed-d898-45ee-ab91-c707a32f7e36-kube-api-access-d98lm\") pod \"oauth-openshift-5d4df5b879-7trhq\" (UID: \"495ab0ed-d898-45ee-ab91-c707a32f7e36\") " pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.259176 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerStarted","Data":"5b9661be0df648975704fa9807f79d8bc754a613ac99bc516de352fbc3ed1413"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.262220 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerStarted","Data":"7b76830ffbc3d4e95d517e58be7fa2c57fa5c19e031060bf090f12ab48f90054"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.263750 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerStarted","Data":"1e8f76d5cd5adf61de6a2ba8c38a0b3e4f95c3190701263bc7d73b1346b950dc"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.271109 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerStarted","Data":"9c4d1ac02cdd0e2e7cc6d823ac590704d93f3ac0a3d7195ffc3840a8c52f041b"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.273958 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerStarted","Data":"460c132537fbab015f5d1d59f90a9674778bc3d82011ceff7d97ac2e147d3cef"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.276766 4788 generic.go:334] "Generic (PLEG): container finished" podID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" containerID="37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80" exitCode=0 Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.276843 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.276895 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" event={"ID":"01badcd8-dec7-4e94-8016-1ccc6a06a7cc","Type":"ContainerDied","Data":"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.276922 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zbjxm" event={"ID":"01badcd8-dec7-4e94-8016-1ccc6a06a7cc","Type":"ContainerDied","Data":"7ebb8e8593fb0566938322eb2d9c8c5ff2844a51bc3d00c6ea8cfacbb8cff182"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.276938 4788 scope.go:117] "RemoveContainer" containerID="37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.280776 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerStarted","Data":"97be21c8ae1f82fa649a398333f9a9e5c8c137b27e49946b598cd82fe4ecb2e5"} Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.301293 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j9fxc" podStartSLOduration=7.182933178 podStartE2EDuration="1m35.301265412s" podCreationTimestamp="2025-12-11 09:21:55 +0000 UTC" firstStartedPulling="2025-12-11 09:21:57.320512868 +0000 UTC m=+47.391292454" lastFinishedPulling="2025-12-11 09:23:25.438845102 +0000 UTC m=+135.509624688" observedRunningTime="2025-12-11 09:23:30.296629056 +0000 UTC m=+140.367408642" watchObservedRunningTime="2025-12-11 09:23:30.301265412 +0000 UTC m=+140.372044998" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.356970 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6zfxb" podStartSLOduration=4.262448623 podStartE2EDuration="1m35.356937952s" podCreationTimestamp="2025-12-11 09:21:55 +0000 UTC" firstStartedPulling="2025-12-11 09:21:58.36629871 +0000 UTC m=+48.437078296" lastFinishedPulling="2025-12-11 09:23:29.460788029 +0000 UTC m=+139.531567625" observedRunningTime="2025-12-11 09:23:30.319922618 +0000 UTC m=+140.390702204" watchObservedRunningTime="2025-12-11 09:23:30.356937952 +0000 UTC m=+140.427717538" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.375596 4788 scope.go:117] "RemoveContainer" containerID="37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80" Dec 11 09:23:30 crc kubenswrapper[4788]: E1211 09:23:30.376105 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80\": container with ID starting with 37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80 not found: ID does not exist" containerID="37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.378436 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80"} err="failed to get container status \"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80\": rpc error: code = NotFound desc = could not find container 
\"37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80\": container with ID starting with 37949c6c7552a93fb3475bc7c3c1e4ec2e3edf1ea6d675d076ec4b50aa913f80 not found: ID does not exist" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.403978 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.443000 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.449878 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zbjxm"] Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.571515 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01badcd8-dec7-4e94-8016-1ccc6a06a7cc" path="/var/lib/kubelet/pods/01badcd8-dec7-4e94-8016-1ccc6a06a7cc/volumes" Dec 11 09:23:30 crc kubenswrapper[4788]: I1211 09:23:30.927468 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5d4df5b879-7trhq"] Dec 11 09:23:30 crc kubenswrapper[4788]: W1211 09:23:30.936497 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod495ab0ed_d898_45ee_ab91_c707a32f7e36.slice/crio-26d92d4175287a15042710381a8dcec3d54a7b7db84d880fe7edc5712e2563a8 WatchSource:0}: Error finding container 26d92d4175287a15042710381a8dcec3d54a7b7db84d880fe7edc5712e2563a8: Status 404 returned error can't find the container with id 26d92d4175287a15042710381a8dcec3d54a7b7db84d880fe7edc5712e2563a8 Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.290549 4788 generic.go:334] "Generic (PLEG): container finished" podID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerID="460c132537fbab015f5d1d59f90a9674778bc3d82011ceff7d97ac2e147d3cef" exitCode=0 Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.290659 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerDied","Data":"460c132537fbab015f5d1d59f90a9674778bc3d82011ceff7d97ac2e147d3cef"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.303080 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" event={"ID":"495ab0ed-d898-45ee-ab91-c707a32f7e36","Type":"ContainerStarted","Data":"62f3fc15df99107b4d9bea2d7a7cc3da96cd837ab801fd96719bc71289c6687c"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.303154 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" event={"ID":"495ab0ed-d898-45ee-ab91-c707a32f7e36","Type":"ContainerStarted","Data":"26d92d4175287a15042710381a8dcec3d54a7b7db84d880fe7edc5712e2563a8"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.303994 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.309588 4788 generic.go:334] "Generic (PLEG): container finished" podID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerID="97be21c8ae1f82fa649a398333f9a9e5c8c137b27e49946b598cd82fe4ecb2e5" exitCode=0 Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.309677 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerDied","Data":"97be21c8ae1f82fa649a398333f9a9e5c8c137b27e49946b598cd82fe4ecb2e5"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.311655 4788 generic.go:334] "Generic (PLEG): container finished" podID="38884c7e-079f-4133-99b7-a8232008072d" containerID="1e8f76d5cd5adf61de6a2ba8c38a0b3e4f95c3190701263bc7d73b1346b950dc" exitCode=0 Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.311703 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerDied","Data":"1e8f76d5cd5adf61de6a2ba8c38a0b3e4f95c3190701263bc7d73b1346b950dc"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.322656 4788 generic.go:334] "Generic (PLEG): container finished" podID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerID="9c4d1ac02cdd0e2e7cc6d823ac590704d93f3ac0a3d7195ffc3840a8c52f041b" exitCode=0 Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.322739 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerDied","Data":"9c4d1ac02cdd0e2e7cc6d823ac590704d93f3ac0a3d7195ffc3840a8c52f041b"} Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.374560 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" podStartSLOduration=27.374531236 podStartE2EDuration="27.374531236s" podCreationTimestamp="2025-12-11 09:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:23:31.3726644 +0000 UTC m=+141.443444016" watchObservedRunningTime="2025-12-11 09:23:31.374531236 +0000 UTC m=+141.445310822" Dec 11 09:23:31 crc kubenswrapper[4788]: I1211 09:23:31.688171 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5d4df5b879-7trhq" Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.330918 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerStarted","Data":"76f88d0006c2be8df672ce76cb15150ac69764891b00cb55ef0e882542e8a85e"} Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.333880 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerStarted","Data":"b27b67a371c9f92bed329674864dbddd3f13829d954e77f8e17a938d1667a3d3"} Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.336489 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerStarted","Data":"c8983eecf82b198da20f3f0de59738ba5ec534ead82e96aec0e7464a6263e5b5"} Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.338955 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerStarted","Data":"4954871b104e079f648869914bea58e85ea568990610aab03fb0ca670317321a"} Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.421441 4788 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/certified-operators-c9gz6" podStartSLOduration=3.78566556 podStartE2EDuration="1m40.421415212s" podCreationTimestamp="2025-12-11 09:21:52 +0000 UTC" firstStartedPulling="2025-12-11 09:21:55.088874885 +0000 UTC m=+45.159654471" lastFinishedPulling="2025-12-11 09:23:31.724624537 +0000 UTC m=+141.795404123" observedRunningTime="2025-12-11 09:23:32.377526266 +0000 UTC m=+142.448305872" watchObservedRunningTime="2025-12-11 09:23:32.421415212 +0000 UTC m=+142.492194798" Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.422558 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mdf99" podStartSLOduration=4.544529135 podStartE2EDuration="1m38.42254965s" podCreationTimestamp="2025-12-11 09:21:54 +0000 UTC" firstStartedPulling="2025-12-11 09:21:57.314125698 +0000 UTC m=+47.384905284" lastFinishedPulling="2025-12-11 09:23:31.192146213 +0000 UTC m=+141.262925799" observedRunningTime="2025-12-11 09:23:32.418092349 +0000 UTC m=+142.488871945" watchObservedRunningTime="2025-12-11 09:23:32.42254965 +0000 UTC m=+142.493329246" Dec 11 09:23:32 crc kubenswrapper[4788]: I1211 09:23:32.479733 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4cclk" podStartSLOduration=2.934061255 podStartE2EDuration="1m38.479712157s" podCreationTimestamp="2025-12-11 09:21:54 +0000 UTC" firstStartedPulling="2025-12-11 09:21:56.212620444 +0000 UTC m=+46.283400030" lastFinishedPulling="2025-12-11 09:23:31.758271346 +0000 UTC m=+141.829050932" observedRunningTime="2025-12-11 09:23:32.45541375 +0000 UTC m=+142.526193356" watchObservedRunningTime="2025-12-11 09:23:32.479712157 +0000 UTC m=+142.550491743" Dec 11 09:23:33 crc kubenswrapper[4788]: I1211 09:23:33.262135 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:23:33 crc kubenswrapper[4788]: I1211 09:23:33.262312 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:23:33 crc kubenswrapper[4788]: I1211 09:23:33.724877 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:23:33 crc kubenswrapper[4788]: I1211 09:23:33.724960 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.332288 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-c9gz6" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="registry-server" probeResult="failure" output=< Dec 11 09:23:34 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:34 crc kubenswrapper[4788]: > Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.652763 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.652808 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.707516 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:23:34 crc 
kubenswrapper[4788]: I1211 09:23:34.731714 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9q655" podStartSLOduration=5.926205169 podStartE2EDuration="1m42.731693407s" podCreationTimestamp="2025-12-11 09:21:52 +0000 UTC" firstStartedPulling="2025-12-11 09:21:55.097366108 +0000 UTC m=+45.168145694" lastFinishedPulling="2025-12-11 09:23:31.902854346 +0000 UTC m=+141.973633932" observedRunningTime="2025-12-11 09:23:32.518834893 +0000 UTC m=+142.589614479" watchObservedRunningTime="2025-12-11 09:23:34.731693407 +0000 UTC m=+144.802472993" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.797495 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-9q655" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="registry-server" probeResult="failure" output=< Dec 11 09:23:34 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:34 crc kubenswrapper[4788]: > Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.936903 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.936977 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:23:34 crc kubenswrapper[4788]: I1211 09:23:34.979732 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:23:35 crc kubenswrapper[4788]: I1211 09:23:35.965043 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:23:35 crc kubenswrapper[4788]: I1211 09:23:35.965149 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.448781 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.448855 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.772454 4788 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773147 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773359 4788 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773668 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd" gracePeriod=15 Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773917 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771" gracePeriod=15 Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773945 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88" gracePeriod=15 Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.774093 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f" gracePeriod=15 Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.773905 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32" gracePeriod=15 Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.775827 4788 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776101 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776116 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776134 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776142 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776154 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776162 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-syncer" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776178 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776185 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776196 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776204 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776215 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776246 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 09:23:36 crc kubenswrapper[4788]: E1211 09:23:36.776374 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776385 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776533 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776552 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776565 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776576 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776588 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.776839 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.822358 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.919247 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.919820 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.919909 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.919984 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.920009 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.920046 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.920116 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.920142 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:36 crc kubenswrapper[4788]: I1211 09:23:36.998661 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j9fxc" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="registry-server" probeResult="failure" output=< Dec 11 09:23:36 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:36 crc kubenswrapper[4788]: > Dec 11 09:23:37 crc kubenswrapper[4788]: E1211 09:23:36.999393 4788 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 
38.129.56.238:6443: connect: connection refused" event=< Dec 11 09:23:37 crc kubenswrapper[4788]: &Event{ObjectMeta:{redhat-operators-j9fxc.18801ee3a4b128fc openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-j9fxc,UID:8e697aeb-5f1a-45f0-9c6e-5b65e638342c,APIVersion:v1,ResourceVersion:28346,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Startup probe failed: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:37 crc kubenswrapper[4788]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,LastTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 09:23:37 crc kubenswrapper[4788]: > Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021510 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021596 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021624 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021635 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021689 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021704 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021737 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021593 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021784 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021793 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021849 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021873 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021919 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021978 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.021979 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.118607 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:23:37 crc kubenswrapper[4788]: W1211 09:23:37.143032 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-ec66f56d43f8970c0234cdf3e5a6a313d9a8b68e38a804c462aa26e64fa3efe6 WatchSource:0}: Error finding container ec66f56d43f8970c0234cdf3e5a6a313d9a8b68e38a804c462aa26e64fa3efe6: Status 404 returned error can't find the container with id ec66f56d43f8970c0234cdf3e5a6a313d9a8b68e38a804c462aa26e64fa3efe6 Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.365424 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ec66f56d43f8970c0234cdf3e5a6a313d9a8b68e38a804c462aa26e64fa3efe6"} Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.469379 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.469460 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Dec 11 09:23:37 crc kubenswrapper[4788]: I1211 09:23:37.491699 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6zfxb" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="registry-server" probeResult="failure" output=< Dec 11 09:23:37 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:37 crc kubenswrapper[4788]: > Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.374414 4788 generic.go:334] "Generic (PLEG): container finished" podID="607b429c-8cbb-48ef-bc1a-7528f271aacb" containerID="86d51bbb2ad4a96789d93b0d5ab39a835ba69fc8994f7b1eb9d62f3ba58d9215" exitCode=0 Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.374545 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"607b429c-8cbb-48ef-bc1a-7528f271aacb","Type":"ContainerDied","Data":"86d51bbb2ad4a96789d93b0d5ab39a835ba69fc8994f7b1eb9d62f3ba58d9215"} Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.375854 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.376317 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 
38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.377015 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.377140 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf"} Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.377830 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.378365 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.378859 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.381139 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.382815 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.384098 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88" exitCode=0 Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.384188 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32" exitCode=0 Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.384202 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771" exitCode=0 Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.384218 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f" exitCode=2 Dec 11 09:23:38 crc kubenswrapper[4788]: I1211 09:23:38.384214 4788 
scope.go:117] "RemoveContainer" containerID="ffffca3ea4e1dd0ab954c621cee795f0e8df1e379f3c93cacbb0c345a7351638" Dec 11 09:23:38 crc kubenswrapper[4788]: E1211 09:23:38.539052 4788 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.129.56.238:6443: connect: connection refused" event=< Dec 11 09:23:38 crc kubenswrapper[4788]: &Event{ObjectMeta:{redhat-operators-j9fxc.18801ee3a4b128fc openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-j9fxc,UID:8e697aeb-5f1a-45f0-9c6e-5b65e638342c,APIVersion:v1,ResourceVersion:28346,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Startup probe failed: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:38 crc kubenswrapper[4788]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,LastTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 09:23:38 crc kubenswrapper[4788]: > Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.211905 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.213295 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.213951 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.214255 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.214515 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356316 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356389 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod 
\"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356477 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356517 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356661 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.356651 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.357274 4788 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.357307 4788 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.357321 4788 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.393968 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.394882 4788 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd" exitCode=0 Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.394988 4788 scope.go:117] "RemoveContainer" containerID="985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.395067 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.410787 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.411419 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.411780 4788 scope.go:117] "RemoveContainer" containerID="207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.411868 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.430764 4788 scope.go:117] "RemoveContainer" containerID="c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.445924 4788 scope.go:117] "RemoveContainer" containerID="7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.460550 4788 scope.go:117] "RemoveContainer" containerID="8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.479471 4788 scope.go:117] "RemoveContainer" containerID="fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.509851 4788 scope.go:117] "RemoveContainer" containerID="985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.510601 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\": container with ID starting with 985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88 not found: ID does not exist" containerID="985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.510695 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88"} err="failed to get container status \"985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\": rpc error: code = NotFound desc = could not find container \"985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88\": container with ID starting with 985f5f3199f72a71e3e473be768cd86b1ebcc5a638102c3be123085590001e88 not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.510756 4788 scope.go:117] "RemoveContainer" 
containerID="207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.511607 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\": container with ID starting with 207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32 not found: ID does not exist" containerID="207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.511647 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32"} err="failed to get container status \"207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\": rpc error: code = NotFound desc = could not find container \"207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32\": container with ID starting with 207ec691f080dddc4d255785ad8a02ef958b0fdbde1815c517880e7741225b32 not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.511667 4788 scope.go:117] "RemoveContainer" containerID="c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.512174 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\": container with ID starting with c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771 not found: ID does not exist" containerID="c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.512197 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771"} err="failed to get container status \"c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\": rpc error: code = NotFound desc = could not find container \"c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771\": container with ID starting with c57671e3723d3799e1e13e244724dca151c856c429afb87d303beeb9184c1771 not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.512210 4788 scope.go:117] "RemoveContainer" containerID="7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.512678 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\": container with ID starting with 7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f not found: ID does not exist" containerID="7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.512714 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f"} err="failed to get container status \"7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\": rpc error: code = NotFound desc = could not find container \"7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f\": container with ID starting with 
7cd8709d84eaa49413b338b327cab611eb853852532a8a8e7e0f753c9f5a414f not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.512748 4788 scope.go:117] "RemoveContainer" containerID="8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.513754 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\": container with ID starting with 8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd not found: ID does not exist" containerID="8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.513811 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd"} err="failed to get container status \"8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\": rpc error: code = NotFound desc = could not find container \"8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd\": container with ID starting with 8092723f5fb27195b60f3faadcdb457904643b4fb1142d7dd5fb612389c295cd not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.513830 4788 scope.go:117] "RemoveContainer" containerID="fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5" Dec 11 09:23:39 crc kubenswrapper[4788]: E1211 09:23:39.514206 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\": container with ID starting with fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5 not found: ID does not exist" containerID="fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.514286 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5"} err="failed to get container status \"fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\": rpc error: code = NotFound desc = could not find container \"fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5\": container with ID starting with fbe7647476703accf7ab2c69ae95c05d02fb5df67e190ccd928c1d11f15700f5 not found: ID does not exist" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.665369 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.666340 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.666552 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.666816 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.762919 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir\") pod \"607b429c-8cbb-48ef-bc1a-7528f271aacb\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.762976 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access\") pod \"607b429c-8cbb-48ef-bc1a-7528f271aacb\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.762994 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock\") pod \"607b429c-8cbb-48ef-bc1a-7528f271aacb\" (UID: \"607b429c-8cbb-48ef-bc1a-7528f271aacb\") " Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.763106 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "607b429c-8cbb-48ef-bc1a-7528f271aacb" (UID: "607b429c-8cbb-48ef-bc1a-7528f271aacb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.763180 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock" (OuterVolumeSpecName: "var-lock") pod "607b429c-8cbb-48ef-bc1a-7528f271aacb" (UID: "607b429c-8cbb-48ef-bc1a-7528f271aacb"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.763790 4788 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.763810 4788 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/607b429c-8cbb-48ef-bc1a-7528f271aacb-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.771316 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "607b429c-8cbb-48ef-bc1a-7528f271aacb" (UID: "607b429c-8cbb-48ef-bc1a-7528f271aacb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:23:39 crc kubenswrapper[4788]: I1211 09:23:39.864552 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/607b429c-8cbb-48ef-bc1a-7528f271aacb-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.410754 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"607b429c-8cbb-48ef-bc1a-7528f271aacb","Type":"ContainerDied","Data":"d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c"} Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.410824 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6a68e18ce1b25fccb49d8924719d4c0d28a0790f86507d0bd3100d6c886906c" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.410923 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.428346 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.428866 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.429490 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.503206 4788 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.503760 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.504149 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:40 crc kubenswrapper[4788]: I1211 09:23:40.506885 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.064964 4788 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.065888 4788 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.066317 4788 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: 
connection refused" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.066709 4788 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.067180 4788 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:42 crc kubenswrapper[4788]: I1211 09:23:42.067256 4788 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.067693 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="200ms" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.269019 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="400ms" Dec 11 09:23:42 crc kubenswrapper[4788]: E1211 09:23:42.670908 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="800ms" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.287894 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.288430 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.289499 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.289756 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.331748 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.332462 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.333167 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.333472 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: E1211 09:23:43.471787 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="1.6s" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.770069 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.770887 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.771289 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.771451 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.771633 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.810770 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.811378 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.811848 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.812036 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:43 crc kubenswrapper[4788]: I1211 09:23:43.812201 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.694076 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.694867 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.695325 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.695945 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.696280 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.696636 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.976933 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.977535 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.977958 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.978613 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.978933 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.979268 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:44 crc kubenswrapper[4788]: I1211 09:23:44.979571 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:45 crc kubenswrapper[4788]: E1211 09:23:45.073157 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="3.2s" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.020911 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.021846 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.022784 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.023093 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.023447 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.023675 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.023875 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.024074 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.068394 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.068911 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.069609 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc 
kubenswrapper[4788]: I1211 09:23:46.070383 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.070923 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.071329 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.071713 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.072042 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.484582 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.485610 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.486208 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.486571 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.486926 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.487336 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.487641 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.487986 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.488391 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.532351 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.533340 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.533962 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.534382 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.534717 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 
38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.535058 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.535336 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.535605 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:46 crc kubenswrapper[4788]: I1211 09:23:46.535916 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: E1211 09:23:48.274128 4788 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.238:6443: connect: connection refused" interval="6.4s" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.496199 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.497349 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.498159 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.498762 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.499241 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.499624 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.499903 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.500162 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.500468 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.518411 4788 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.518462 4788 mirror_client.go:130] "Deleting a 
mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:48 crc kubenswrapper[4788]: E1211 09:23:48.519175 4788 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:48 crc kubenswrapper[4788]: I1211 09:23:48.520135 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:48 crc kubenswrapper[4788]: E1211 09:23:48.540537 4788 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.129.56.238:6443: connect: connection refused" event=< Dec 11 09:23:48 crc kubenswrapper[4788]: &Event{ObjectMeta:{redhat-operators-j9fxc.18801ee3a4b128fc openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-j9fxc,UID:8e697aeb-5f1a-45f0-9c6e-5b65e638342c,APIVersion:v1,ResourceVersion:28346,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Startup probe failed: timeout: failed to connect service ":50051" within 1s Dec 11 09:23:48 crc kubenswrapper[4788]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,LastTimestamp:2025-12-11 09:23:36.998717692 +0000 UTC m=+147.069497278,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 09:23:48 crc kubenswrapper[4788]: > Dec 11 09:23:49 crc kubenswrapper[4788]: I1211 09:23:49.469605 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e48cc5f376756c094f5054e58e474adf6b4dd468972ab61c14383d567b93b99f"} Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.511246 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.511706 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.512034 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.512307 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.512602 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.513077 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.513400 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.513652 4788 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:50 crc kubenswrapper[4788]: I1211 09:23:50.513936 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.369692 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.370254 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.482666 4788 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="65d6d56a690e770a12ed21a532e557a2571c36ea74a01950f1c06d29f41e074e" exitCode=0 Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.482743 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"65d6d56a690e770a12ed21a532e557a2571c36ea74a01950f1c06d29f41e074e"} Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 
09:23:51.483025 4788 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.483056 4788 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.483450 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: E1211 09:23:51.483542 4788 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.483819 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.484301 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.484594 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.484869 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.485146 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.485501 4788 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.485684 
4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.485880 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.486261 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.486312 4788 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22" exitCode=1 Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.486344 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22"} Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.486717 4788 scope.go:117] "RemoveContainer" containerID="4ed53e84dd5dc9cebb1c2ea23589622a9699075675d3f5f4e2fdd49b86d59f22" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.486825 4788 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.487037 4788 status_manager.go:851] "Failed to get status for pod" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" pod="openshift-marketplace/redhat-operators-j9fxc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-j9fxc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.487275 4788 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.487513 4788 status_manager.go:851] "Failed to get status for pod" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" pod="openshift-marketplace/redhat-operators-6zfxb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-6zfxb\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.487807 4788 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.488320 4788 status_manager.go:851] "Failed to get status for pod" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" pod="openshift-marketplace/redhat-marketplace-mdf99" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mdf99\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.489013 4788 status_manager.go:851] "Failed to get status for pod" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" pod="openshift-marketplace/redhat-marketplace-4cclk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-4cclk\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.489346 4788 status_manager.go:851] "Failed to get status for pod" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.489686 4788 status_manager.go:851] "Failed to get status for pod" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" pod="openshift-marketplace/certified-operators-9q655" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9q655\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.490015 4788 status_manager.go:851] "Failed to get status for pod" podUID="38884c7e-079f-4133-99b7-a8232008072d" pod="openshift-marketplace/certified-operators-c9gz6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c9gz6\": dial tcp 38.129.56.238:6443: connect: connection refused" Dec 11 09:23:51 crc kubenswrapper[4788]: I1211 09:23:51.721215 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:23:52 crc kubenswrapper[4788]: I1211 09:23:52.504424 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 09:23:52 crc kubenswrapper[4788]: I1211 09:23:52.505935 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"768a83a268f6ebaadb6ea5e40be8b111943c5e3cc9e47ba2c0a683eaaa7d55ba"} Dec 11 09:23:52 crc kubenswrapper[4788]: I1211 09:23:52.506001 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f9acac72f576f77396e14fbc101a4ccb70d116ee58dc3226f82345189f8a8620"} Dec 11 09:23:52 crc kubenswrapper[4788]: I1211 09:23:52.506014 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d68d7bd98f16eb4bcfb5cf659100aff6f8657f9cc5f1221e118f15a1f4ce3300"} Dec 11 09:23:52 crc kubenswrapper[4788]: I1211 09:23:52.506050 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8bb4ffd2565f3f6ff43cbba232051dea080996b98cb370cb1e49cbe0e099db22"} Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.517298 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"49631bf714ef8d52111c2fa6d1cfb37ca6f3ed202d737652cd32f58bdb701339"} Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.518463 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.518486 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a52df6e5c75d40975925040dd293fc2cab59c9e65769f3f656ea25308d8728ab"} Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.517579 4788 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.518796 4788 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.520386 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.520448 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.527359 4788 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]log ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]etcd ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-api-request-count-filter ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-startkubeinformers ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/generic-apiserver-start-informers ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/priority-and-fairness-config-consumer ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/priority-and-fairness-filter ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-apiextensions-informers ok Dec 11 09:23:53 crc 
kubenswrapper[4788]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Dec 11 09:23:53 crc kubenswrapper[4788]: [-]poststarthook/crd-informer-synced failed: reason withheld Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-system-namespaces-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-cluster-authentication-info-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-legacy-token-tracking-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-service-ip-repair-controllers ok Dec 11 09:23:53 crc kubenswrapper[4788]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Dec 11 09:23:53 crc kubenswrapper[4788]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/priority-and-fairness-config-producer ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/bootstrap-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/start-kube-aggregator-informers ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-status-local-available-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-status-remote-available-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-registration-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-wait-for-first-sync ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-discovery-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/kube-apiserver-autoregistration ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]autoregister-completion ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-openapi-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: [+]poststarthook/apiservice-openapiv3-controller ok Dec 11 09:23:53 crc kubenswrapper[4788]: livez check failed Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.527433 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.599937 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.600586 4788 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 11 09:23:53 crc kubenswrapper[4788]: I1211 09:23:53.600787 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": 
dial tcp 192.168.126.11:10257: connect: connection refused" Dec 11 09:23:58 crc kubenswrapper[4788]: I1211 09:23:58.527047 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:58 crc kubenswrapper[4788]: I1211 09:23:58.532800 4788 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:23:58 crc kubenswrapper[4788]: I1211 09:23:58.554502 4788 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="dc4a1f10-8621-46e4-b5dd-411b50897706" Dec 11 09:23:59 crc kubenswrapper[4788]: I1211 09:23:59.562388 4788 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:23:59 crc kubenswrapper[4788]: I1211 09:23:59.562443 4788 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="36b7ed51-7958-44ed-94a5-17d3f2d6c3a2" Dec 11 09:24:00 crc kubenswrapper[4788]: I1211 09:24:00.519094 4788 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="dc4a1f10-8621-46e4-b5dd-411b50897706" Dec 11 09:24:01 crc kubenswrapper[4788]: I1211 09:24:01.721860 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:24:03 crc kubenswrapper[4788]: I1211 09:24:03.600949 4788 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 11 09:24:03 crc kubenswrapper[4788]: I1211 09:24:03.601614 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 11 09:24:07 crc kubenswrapper[4788]: I1211 09:24:07.950102 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 09:24:08 crc kubenswrapper[4788]: I1211 09:24:08.325954 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 09:24:08 crc kubenswrapper[4788]: I1211 09:24:08.665310 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.007553 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.280808 4788 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.403855 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 09:24:10 crc 
kubenswrapper[4788]: I1211 09:24:10.410040 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.562634 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.744865 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.744941 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.746269 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.780565 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.784828 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.808699 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 09:24:10 crc kubenswrapper[4788]: I1211 09:24:10.852405 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.449628 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.591695 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.623009 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.647761 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.826325 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.834262 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.843582 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 09:24:11 crc kubenswrapper[4788]: I1211 09:24:11.887028 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.006749 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.041828 4788 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.288880 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.288880 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.322602 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.522221 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.551968 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.700168 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.719877 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.725862 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.846157 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.903071 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.904769 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 09:24:12 crc kubenswrapper[4788]: I1211 09:24:12.969978 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.021308 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.141440 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.207826 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.254259 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.408860 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.420041 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.509346 4788 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"openshift-service-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.541601 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.557518 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.558819 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.611171 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.611742 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.620789 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.823996 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.848691 4788 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.852078 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 09:24:13 crc kubenswrapper[4788]: I1211 09:24:13.913046 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.004325 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.094468 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.161464 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.229816 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.341778 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.466444 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.521152 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.527366 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 
09:24:14.527808 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.587213 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.630218 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.634296 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.658573 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.687565 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.734827 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.795820 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.836811 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.860652 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.943306 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.958152 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 09:24:14 crc kubenswrapper[4788]: I1211 09:24:14.974756 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.006417 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.011107 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.033373 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.072742 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.083615 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.129695 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 09:24:15 crc 
kubenswrapper[4788]: I1211 09:24:15.210785 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.267797 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.284497 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.358404 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.361182 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.411668 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.428178 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.449934 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.497224 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.670721 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.671736 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.722527 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.724848 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.822914 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.827418 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.903021 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.908009 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.969895 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 09:24:15 crc kubenswrapper[4788]: I1211 09:24:15.973805 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" 
Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.047055 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.080715 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.085546 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.163516 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.222578 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.223262 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.223464 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.245924 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.328181 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.385012 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.423629 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.498970 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.637927 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.650108 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.685264 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.732985 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.823667 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.921265 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.923724 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.955504 4788 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.958774 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.964751 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 09:24:16 crc kubenswrapper[4788]: I1211 09:24:16.999509 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.066520 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.107210 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.147637 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.185032 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.223050 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.253812 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.319870 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.320822 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.326002 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.351473 4788 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.381785 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.400849 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.469963 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.482660 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.484288 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.617301 4788 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.618082 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.731820 4788 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.749834 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.793713 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.831771 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.870398 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.972996 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 09:24:17 crc kubenswrapper[4788]: I1211 09:24:17.988398 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.130116 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.193390 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.199372 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.244087 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.305537 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.335988 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.356048 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.381799 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.429549 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.459738 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.487485 4788 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.491465 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.506896 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.617467 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.626677 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.707420 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.805604 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.809221 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.872326 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.885579 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 09:24:18 crc kubenswrapper[4788]: I1211 09:24:18.952429 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.041040 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.078998 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.100207 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.140768 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.217569 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.234857 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.312745 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.385872 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.396043 4788 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.461162 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.461938 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.512375 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.634864 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.747732 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.755042 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.861432 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.924418 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.961797 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 09:24:19 crc kubenswrapper[4788]: I1211 09:24:19.999313 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.069757 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.130255 4788 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.132426 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=44.132410016 podStartE2EDuration="44.132410016s" podCreationTimestamp="2025-12-11 09:23:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:23:58.417424287 +0000 UTC m=+168.488203883" watchObservedRunningTime="2025-12-11 09:24:20.132410016 +0000 UTC m=+190.203189602" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.134640 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.134699 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.137460 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.137702 4788 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.139030 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.139156 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.158584 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.158561349 podStartE2EDuration="22.158561349s" podCreationTimestamp="2025-12-11 09:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:24:20.156456036 +0000 UTC m=+190.227235642" watchObservedRunningTime="2025-12-11 09:24:20.158561349 +0000 UTC m=+190.229340945" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.204534 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.217815 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.230258 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.272045 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.280032 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.311408 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.399720 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.442835 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.467752 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.510587 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.528757 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.567122 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.678652 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.686327 4788 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-image-registry"/"image-registry-certificates" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.733062 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.765142 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.848436 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.857773 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 09:24:20 crc kubenswrapper[4788]: I1211 09:24:20.970885 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.047464 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.066669 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.098173 4788 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.098475 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf" gracePeriod=5 Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.145366 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.307100 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.326563 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.347387 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.369009 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.369078 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.386656 4788 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.392609 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.449698 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.489622 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.521263 4788 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.543517 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.743904 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 09:24:21 crc kubenswrapper[4788]: I1211 09:24:21.907027 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.108905 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.109526 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.156331 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.167446 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.199946 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.218052 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.535239 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.568257 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.624582 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.680919 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 09:24:22 crc kubenswrapper[4788]: I1211 09:24:22.809898 4788 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.223505 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.426035 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.452283 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.495016 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.674307 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.730994 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.768429 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.861133 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 09:24:23 crc kubenswrapper[4788]: I1211 09:24:23.932847 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 09:24:24 crc kubenswrapper[4788]: I1211 09:24:24.006810 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 09:24:24 crc kubenswrapper[4788]: I1211 09:24:24.055303 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 09:24:24 crc kubenswrapper[4788]: I1211 09:24:24.087341 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 09:24:24 crc kubenswrapper[4788]: I1211 09:24:24.117300 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 09:24:24 crc kubenswrapper[4788]: I1211 09:24:24.746665 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 09:24:25 crc kubenswrapper[4788]: I1211 09:24:25.121627 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 09:24:25 crc kubenswrapper[4788]: I1211 09:24:25.179889 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 09:24:25 crc kubenswrapper[4788]: I1211 09:24:25.302550 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 09:24:25 crc kubenswrapper[4788]: I1211 09:24:25.340473 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.382097 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.382589 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564134 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564191 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564213 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564301 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564356 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564821 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564902 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.564901 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.565046 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.575855 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.666726 4788 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.666785 4788 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.666797 4788 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.666808 4788 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.666819 4788 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.729738 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.729810 4788 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf" exitCode=137 Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.729894 4788 scope.go:117] "RemoveContainer" containerID="23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.729941 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.778712 4788 scope.go:117] "RemoveContainer" containerID="23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf" Dec 11 09:24:27 crc kubenswrapper[4788]: E1211 09:24:27.779408 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf\": container with ID starting with 23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf not found: ID does not exist" containerID="23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf" Dec 11 09:24:27 crc kubenswrapper[4788]: I1211 09:24:27.779466 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf"} err="failed to get container status \"23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf\": rpc error: code = NotFound desc = could not find container \"23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf\": container with ID starting with 23de4a0a7dde3c75f2ff1e91d85a11cbffc52c9bf1146e379d5cb7fcd2a900bf not found: ID does not exist" Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.501345 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.501604 4788 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.523275 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.523337 4788 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="919435fc-26d7-4b71-afae-3d084beb5c43" Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.527061 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 09:24:28 crc kubenswrapper[4788]: I1211 09:24:28.527106 4788 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="919435fc-26d7-4b71-afae-3d084beb5c43" Dec 11 09:24:39 crc kubenswrapper[4788]: I1211 09:24:39.154328 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 09:24:49 crc kubenswrapper[4788]: I1211 09:24:49.848874 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:24:49 crc kubenswrapper[4788]: I1211 09:24:49.850111 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerName="route-controller-manager" containerID="cri-o://bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35" gracePeriod=30 Dec 11 09:24:49 crc kubenswrapper[4788]: I1211 09:24:49.854701 4788 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:24:49 crc kubenswrapper[4788]: I1211 09:24:49.855704 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerName="controller-manager" containerID="cri-o://a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc" gracePeriod=30 Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.793295 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.801406 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.867701 4788 generic.go:334] "Generic (PLEG): container finished" podID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerID="a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc" exitCode=0 Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.867788 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.867824 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" event={"ID":"19a593cb-a446-4977-9235-4b4036d2f2f8","Type":"ContainerDied","Data":"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc"} Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.867870 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-z97fq" event={"ID":"19a593cb-a446-4977-9235-4b4036d2f2f8","Type":"ContainerDied","Data":"78ad6a8088ac412191e07e577615dde9fabaa07bfc182b6bc6e6736eabec992a"} Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.867896 4788 scope.go:117] "RemoveContainer" containerID="a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.872351 4788 generic.go:334] "Generic (PLEG): container finished" podID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerID="bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35" exitCode=0 Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.872405 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.872424 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" event={"ID":"cd4a1f90-9ac0-41cc-b980-91964f48715d","Type":"ContainerDied","Data":"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35"} Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.872472 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh" event={"ID":"cd4a1f90-9ac0-41cc-b980-91964f48715d","Type":"ContainerDied","Data":"303a2064f0090d8bc000b0181f0d6abd6952fcb1dcc614905e8697b60d9608bd"} Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.895839 4788 scope.go:117] "RemoveContainer" containerID="a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc" Dec 11 09:24:50 crc kubenswrapper[4788]: E1211 09:24:50.896993 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc\": container with ID starting with a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc not found: ID does not exist" containerID="a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.897046 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc"} err="failed to get container status \"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc\": rpc error: code = NotFound desc = could not find container \"a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc\": container with ID starting with a06b2a8f00bbd9d9dcd989d27c72b15dc917d72aa11aa74a595102d9135497bc not found: ID does not exist" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.897076 4788 scope.go:117] "RemoveContainer" containerID="bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.912888 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config\") pod \"cd4a1f90-9ac0-41cc-b980-91964f48715d\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913010 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles\") pod \"19a593cb-a446-4977-9235-4b4036d2f2f8\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913106 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca\") pod \"cd4a1f90-9ac0-41cc-b980-91964f48715d\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913192 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69sj6\" (UniqueName: \"kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6\") pod 
\"19a593cb-a446-4977-9235-4b4036d2f2f8\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913329 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca\") pod \"19a593cb-a446-4977-9235-4b4036d2f2f8\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913457 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config\") pod \"19a593cb-a446-4977-9235-4b4036d2f2f8\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913580 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsrg5\" (UniqueName: \"kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5\") pod \"cd4a1f90-9ac0-41cc-b980-91964f48715d\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913666 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert\") pod \"cd4a1f90-9ac0-41cc-b980-91964f48715d\" (UID: \"cd4a1f90-9ac0-41cc-b980-91964f48715d\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.913782 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert\") pod \"19a593cb-a446-4977-9235-4b4036d2f2f8\" (UID: \"19a593cb-a446-4977-9235-4b4036d2f2f8\") " Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.914465 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "19a593cb-a446-4977-9235-4b4036d2f2f8" (UID: "19a593cb-a446-4977-9235-4b4036d2f2f8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.914482 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca" (OuterVolumeSpecName: "client-ca") pod "19a593cb-a446-4977-9235-4b4036d2f2f8" (UID: "19a593cb-a446-4977-9235-4b4036d2f2f8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.914594 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config" (OuterVolumeSpecName: "config") pod "cd4a1f90-9ac0-41cc-b980-91964f48715d" (UID: "cd4a1f90-9ac0-41cc-b980-91964f48715d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.915460 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config" (OuterVolumeSpecName: "config") pod "19a593cb-a446-4977-9235-4b4036d2f2f8" (UID: "19a593cb-a446-4977-9235-4b4036d2f2f8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.915727 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca" (OuterVolumeSpecName: "client-ca") pod "cd4a1f90-9ac0-41cc-b980-91964f48715d" (UID: "cd4a1f90-9ac0-41cc-b980-91964f48715d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.915826 4788 scope.go:117] "RemoveContainer" containerID="bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35" Dec 11 09:24:50 crc kubenswrapper[4788]: E1211 09:24:50.917595 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35\": container with ID starting with bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35 not found: ID does not exist" containerID="bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.917658 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35"} err="failed to get container status \"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35\": rpc error: code = NotFound desc = could not find container \"bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35\": container with ID starting with bcef79ba200b482d54e860b42fb8af11ee90c70428fb562634a8472232d8cd35 not found: ID does not exist" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.922243 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cd4a1f90-9ac0-41cc-b980-91964f48715d" (UID: "cd4a1f90-9ac0-41cc-b980-91964f48715d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.922358 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5" (OuterVolumeSpecName: "kube-api-access-hsrg5") pod "cd4a1f90-9ac0-41cc-b980-91964f48715d" (UID: "cd4a1f90-9ac0-41cc-b980-91964f48715d"). InnerVolumeSpecName "kube-api-access-hsrg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.922832 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "19a593cb-a446-4977-9235-4b4036d2f2f8" (UID: "19a593cb-a446-4977-9235-4b4036d2f2f8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:24:50 crc kubenswrapper[4788]: I1211 09:24:50.922861 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6" (OuterVolumeSpecName: "kube-api-access-69sj6") pod "19a593cb-a446-4977-9235-4b4036d2f2f8" (UID: "19a593cb-a446-4977-9235-4b4036d2f2f8"). InnerVolumeSpecName "kube-api-access-69sj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015520 4788 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015570 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015584 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsrg5\" (UniqueName: \"kubernetes.io/projected/cd4a1f90-9ac0-41cc-b980-91964f48715d-kube-api-access-hsrg5\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015601 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a1f90-9ac0-41cc-b980-91964f48715d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015611 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19a593cb-a446-4977-9235-4b4036d2f2f8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015624 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015632 4788 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19a593cb-a446-4977-9235-4b4036d2f2f8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015643 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69sj6\" (UniqueName: \"kubernetes.io/projected/19a593cb-a446-4977-9235-4b4036d2f2f8-kube-api-access-69sj6\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.015667 4788 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd4a1f90-9ac0-41cc-b980-91964f48715d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.198404 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.202694 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-z97fq"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.212501 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.218376 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7qxsh"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.369190 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.369300 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.369372 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.370508 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.370586 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3" gracePeriod=600 Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.893064 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-b8c45c79b-jrnfd"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.894759 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3" exitCode=0 Dec 11 09:24:51 crc kubenswrapper[4788]: E1211 09:24:51.901094 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerName="route-controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901131 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerName="route-controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: E1211 09:24:51.901150 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerName="controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901160 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerName="controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: E1211 09:24:51.901174 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" containerName="installer" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901182 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" containerName="installer" Dec 11 09:24:51 crc kubenswrapper[4788]: E1211 09:24:51.901194 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901203 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901382 4788 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" containerName="route-controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901407 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" containerName="controller-manager" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901417 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901426 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="607b429c-8cbb-48ef-bc1a-7528f271aacb" containerName="installer" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901853 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3"} Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.901893 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99"} Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.902170 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.907258 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.907623 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.907849 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.909886 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.910093 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.910396 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.911751 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.911915 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.912827 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.918414 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b8c45c79b-jrnfd"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.918483 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.918679 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.918707 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.918848 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.919109 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.919169 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.922021 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929545 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929596 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-config\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929626 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929649 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljgcj\" (UniqueName: \"kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929681 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-client-ca\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929701 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-proxy-ca-bundles\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929721 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929746 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlm6z\" (UniqueName: \"kubernetes.io/projected/bc06e42a-c140-4122-8229-d484e5af8b94-kube-api-access-zlm6z\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:51 crc kubenswrapper[4788]: I1211 09:24:51.929827 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc06e42a-c140-4122-8229-d484e5af8b94-serving-cert\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030719 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030780 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljgcj\" (UniqueName: \"kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030811 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-client-ca\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030829 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-proxy-ca-bundles\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030845 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030865 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlm6z\" (UniqueName: \"kubernetes.io/projected/bc06e42a-c140-4122-8229-d484e5af8b94-kube-api-access-zlm6z\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030908 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc06e42a-c140-4122-8229-d484e5af8b94-serving-cert\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030935 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.030957 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-config\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.033157 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.033108 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-client-ca\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.033177 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-proxy-ca-bundles\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: 
\"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.033503 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc06e42a-c140-4122-8229-d484e5af8b94-config\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.033588 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.042260 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.045875 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc06e42a-c140-4122-8229-d484e5af8b94-serving-cert\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.055054 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlm6z\" (UniqueName: \"kubernetes.io/projected/bc06e42a-c140-4122-8229-d484e5af8b94-kube-api-access-zlm6z\") pod \"controller-manager-b8c45c79b-jrnfd\" (UID: \"bc06e42a-c140-4122-8229-d484e5af8b94\") " pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.056905 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljgcj\" (UniqueName: \"kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj\") pod \"route-controller-manager-5d7d846658-mphvv\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.223016 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.237413 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.484899 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-b8c45c79b-jrnfd"] Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.519466 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a593cb-a446-4977-9235-4b4036d2f2f8" path="/var/lib/kubelet/pods/19a593cb-a446-4977-9235-4b4036d2f2f8/volumes" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.520693 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd4a1f90-9ac0-41cc-b980-91964f48715d" path="/var/lib/kubelet/pods/cd4a1f90-9ac0-41cc-b980-91964f48715d/volumes" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.744661 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:24:52 crc kubenswrapper[4788]: W1211 09:24:52.751508 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05cfd378_9b56_4021_b5b8_35f9c2e44905.slice/crio-7db32b68861f21b693e1c3db4efdd85818587b0ce1a2a61f3370a0966a0a1457 WatchSource:0}: Error finding container 7db32b68861f21b693e1c3db4efdd85818587b0ce1a2a61f3370a0966a0a1457: Status 404 returned error can't find the container with id 7db32b68861f21b693e1c3db4efdd85818587b0ce1a2a61f3370a0966a0a1457 Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.909910 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" event={"ID":"05cfd378-9b56-4021-b5b8-35f9c2e44905","Type":"ContainerStarted","Data":"99b7d9449f38f1583642c07deec2a8ad075230784e08d5f0360e8b3ac17f389f"} Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.909988 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" event={"ID":"05cfd378-9b56-4021-b5b8-35f9c2e44905","Type":"ContainerStarted","Data":"7db32b68861f21b693e1c3db4efdd85818587b0ce1a2a61f3370a0966a0a1457"} Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.910207 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.911824 4788 patch_prober.go:28] interesting pod/route-controller-manager-5d7d846658-mphvv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body= Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.911885 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.912386 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" 
event={"ID":"bc06e42a-c140-4122-8229-d484e5af8b94","Type":"ContainerStarted","Data":"f0f8d53be8f4010a5a6d1bb9086dda0c0dc1eca4ae42a58f59f72a494c2de210"} Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.912428 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" event={"ID":"bc06e42a-c140-4122-8229-d484e5af8b94","Type":"ContainerStarted","Data":"c1e85e3ec2cedd02122a1f69f000756a6c06bd941659ec22536280f1c5993d03"} Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.913022 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.930793 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" Dec 11 09:24:52 crc kubenswrapper[4788]: I1211 09:24:52.953715 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" podStartSLOduration=2.953685096 podStartE2EDuration="2.953685096s" podCreationTimestamp="2025-12-11 09:24:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:24:52.947661625 +0000 UTC m=+223.018441221" watchObservedRunningTime="2025-12-11 09:24:52.953685096 +0000 UTC m=+223.024464682" Dec 11 09:24:53 crc kubenswrapper[4788]: I1211 09:24:53.923456 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:24:53 crc kubenswrapper[4788]: I1211 09:24:53.941172 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-b8c45c79b-jrnfd" podStartSLOduration=4.941147558 podStartE2EDuration="4.941147558s" podCreationTimestamp="2025-12-11 09:24:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:24:52.988038953 +0000 UTC m=+223.058818539" watchObservedRunningTime="2025-12-11 09:24:53.941147558 +0000 UTC m=+224.011927144" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.530438 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vzscw"] Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.532124 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.562365 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vzscw"] Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723134 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723250 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-bound-sa-token\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723298 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djdcp\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-kube-api-access-djdcp\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723317 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723354 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723373 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-tls\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723393 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-certificates\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.723601 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-trusted-ca\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.748121 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825396 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djdcp\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-kube-api-access-djdcp\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825467 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825514 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825539 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-tls\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825563 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-certificates\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825586 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-trusted-ca\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.825640 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-bound-sa-token\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.826556 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.827290 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-trusted-ca\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.827449 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-certificates\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.834498 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-registry-tls\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.835044 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.848590 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-bound-sa-token\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.849420 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djdcp\" (UniqueName: \"kubernetes.io/projected/a84a0a5c-d484-4027-89d6-3f8d1c5cb51b-kube-api-access-djdcp\") pod \"image-registry-66df7c8f76-vzscw\" (UID: \"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b\") " pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:57 crc kubenswrapper[4788]: I1211 09:24:57.853561 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:58 crc kubenswrapper[4788]: I1211 09:24:58.341897 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vzscw"] Dec 11 09:24:58 crc kubenswrapper[4788]: I1211 09:24:58.951485 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" event={"ID":"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b","Type":"ContainerStarted","Data":"752a9fc66195634ebfb423bb0b3b87fb1790dae0f4b8fdf889e949534ba515f1"} Dec 11 09:24:59 crc kubenswrapper[4788]: I1211 09:24:59.959314 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" event={"ID":"a84a0a5c-d484-4027-89d6-3f8d1c5cb51b","Type":"ContainerStarted","Data":"0d3e683dd244c19d52db2125cbe3b18dbd6eb486db15cdbaccc3e8ffaebf53bb"} Dec 11 09:24:59 crc kubenswrapper[4788]: I1211 09:24:59.959857 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:24:59 crc kubenswrapper[4788]: I1211 09:24:59.981667 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" podStartSLOduration=2.981639768 podStartE2EDuration="2.981639768s" podCreationTimestamp="2025-12-11 09:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:24:59.979444363 +0000 UTC m=+230.050223969" watchObservedRunningTime="2025-12-11 09:24:59.981639768 +0000 UTC m=+230.052419354" Dec 11 09:25:17 crc kubenswrapper[4788]: I1211 09:25:17.862661 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-vzscw" Dec 11 09:25:17 crc kubenswrapper[4788]: I1211 09:25:17.917796 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:25:29 crc kubenswrapper[4788]: I1211 09:25:29.804755 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:25:29 crc kubenswrapper[4788]: I1211 09:25:29.805721 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerName="route-controller-manager" containerID="cri-o://99b7d9449f38f1583642c07deec2a8ad075230784e08d5f0360e8b3ac17f389f" gracePeriod=30 Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.139515 4788 generic.go:334] "Generic (PLEG): container finished" podID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerID="99b7d9449f38f1583642c07deec2a8ad075230784e08d5f0360e8b3ac17f389f" exitCode=0 Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.139678 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" event={"ID":"05cfd378-9b56-4021-b5b8-35f9c2e44905","Type":"ContainerDied","Data":"99b7d9449f38f1583642c07deec2a8ad075230784e08d5f0360e8b3ac17f389f"} Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.487986 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.521397 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj"] Dec 11 09:25:31 crc kubenswrapper[4788]: E1211 09:25:31.521768 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerName="route-controller-manager" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.521799 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerName="route-controller-manager" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.521947 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" containerName="route-controller-manager" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.522474 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.539187 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj"] Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.668922 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config\") pod \"05cfd378-9b56-4021-b5b8-35f9c2e44905\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669001 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca\") pod \"05cfd378-9b56-4021-b5b8-35f9c2e44905\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669044 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert\") pod \"05cfd378-9b56-4021-b5b8-35f9c2e44905\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljgcj\" (UniqueName: \"kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj\") pod \"05cfd378-9b56-4021-b5b8-35f9c2e44905\" (UID: \"05cfd378-9b56-4021-b5b8-35f9c2e44905\") " Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669644 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-config\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669677 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dc22680-4848-46e0-9827-4d2af518f32f-serving-cert\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: 
\"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669802 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjb46\" (UniqueName: \"kubernetes.io/projected/9dc22680-4848-46e0-9827-4d2af518f32f-kube-api-access-pjb46\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.669837 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-client-ca\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.670165 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca" (OuterVolumeSpecName: "client-ca") pod "05cfd378-9b56-4021-b5b8-35f9c2e44905" (UID: "05cfd378-9b56-4021-b5b8-35f9c2e44905"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.670182 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config" (OuterVolumeSpecName: "config") pod "05cfd378-9b56-4021-b5b8-35f9c2e44905" (UID: "05cfd378-9b56-4021-b5b8-35f9c2e44905"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.675911 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "05cfd378-9b56-4021-b5b8-35f9c2e44905" (UID: "05cfd378-9b56-4021-b5b8-35f9c2e44905"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.675994 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj" (OuterVolumeSpecName: "kube-api-access-ljgcj") pod "05cfd378-9b56-4021-b5b8-35f9c2e44905" (UID: "05cfd378-9b56-4021-b5b8-35f9c2e44905"). InnerVolumeSpecName "kube-api-access-ljgcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771425 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjb46\" (UniqueName: \"kubernetes.io/projected/9dc22680-4848-46e0-9827-4d2af518f32f-kube-api-access-pjb46\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771479 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-client-ca\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771523 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-config\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771542 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dc22680-4848-46e0-9827-4d2af518f32f-serving-cert\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771608 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771620 4788 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/05cfd378-9b56-4021-b5b8-35f9c2e44905-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771629 4788 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05cfd378-9b56-4021-b5b8-35f9c2e44905-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.771638 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljgcj\" (UniqueName: \"kubernetes.io/projected/05cfd378-9b56-4021-b5b8-35f9c2e44905-kube-api-access-ljgcj\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.772958 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-client-ca\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.773320 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dc22680-4848-46e0-9827-4d2af518f32f-config\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: 
\"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.775961 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dc22680-4848-46e0-9827-4d2af518f32f-serving-cert\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.790312 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjb46\" (UniqueName: \"kubernetes.io/projected/9dc22680-4848-46e0-9827-4d2af518f32f-kube-api-access-pjb46\") pod \"route-controller-manager-7f4767fcb-5twsj\" (UID: \"9dc22680-4848-46e0-9827-4d2af518f32f\") " pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:31 crc kubenswrapper[4788]: I1211 09:25:31.849529 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.152737 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" event={"ID":"05cfd378-9b56-4021-b5b8-35f9c2e44905","Type":"ContainerDied","Data":"7db32b68861f21b693e1c3db4efdd85818587b0ce1a2a61f3370a0966a0a1457"} Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.152806 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv" Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.152830 4788 scope.go:117] "RemoveContainer" containerID="99b7d9449f38f1583642c07deec2a8ad075230784e08d5f0360e8b3ac17f389f" Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.209287 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.213401 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5d7d846658-mphvv"] Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.273392 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj"] Dec 11 09:25:32 crc kubenswrapper[4788]: I1211 09:25:32.505448 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05cfd378-9b56-4021-b5b8-35f9c2e44905" path="/var/lib/kubelet/pods/05cfd378-9b56-4021-b5b8-35f9c2e44905/volumes" Dec 11 09:25:33 crc kubenswrapper[4788]: I1211 09:25:33.162128 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" event={"ID":"9dc22680-4848-46e0-9827-4d2af518f32f","Type":"ContainerStarted","Data":"4184010e677f3adb2eccf0871d878fa0ec56ba9fcfe443bc3eb1ef302172d4e9"} Dec 11 09:25:33 crc kubenswrapper[4788]: I1211 09:25:33.162188 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" event={"ID":"9dc22680-4848-46e0-9827-4d2af518f32f","Type":"ContainerStarted","Data":"168423f04d22e9d7f4a09fa06b573572c52c96b67c1fad2060d700e125dc95d9"} Dec 11 09:25:33 crc 
kubenswrapper[4788]: I1211 09:25:33.162567 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:33 crc kubenswrapper[4788]: I1211 09:25:33.179919 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" podStartSLOduration=4.179897455 podStartE2EDuration="4.179897455s" podCreationTimestamp="2025-12-11 09:25:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:25:33.17930677 +0000 UTC m=+263.250086366" watchObservedRunningTime="2025-12-11 09:25:33.179897455 +0000 UTC m=+263.250677041" Dec 11 09:25:33 crc kubenswrapper[4788]: I1211 09:25:33.185924 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f4767fcb-5twsj" Dec 11 09:25:42 crc kubenswrapper[4788]: I1211 09:25:42.964443 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" podUID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" containerName="registry" containerID="cri-o://b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581" gracePeriod=30 Dec 11 09:25:43 crc kubenswrapper[4788]: I1211 09:25:43.689483 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:25:43 crc kubenswrapper[4788]: I1211 09:25:43.690179 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mdf99" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="registry-server" containerID="cri-o://4954871b104e079f648869914bea58e85ea568990610aab03fb0ca670317321a" gracePeriod=2 Dec 11 09:25:43 crc kubenswrapper[4788]: I1211 09:25:43.881158 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:25:43 crc kubenswrapper[4788]: I1211 09:25:43.881644 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6zfxb" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="registry-server" containerID="cri-o://7b76830ffbc3d4e95d517e58be7fa2c57fa5c19e031060bf090f12ab48f90054" gracePeriod=2 Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.038802 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.140778 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141099 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141125 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141166 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141201 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4gcs\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141244 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141272 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.141293 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token\") pod \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\" (UID: \"9bede3eb-f7c3-40df-84a6-2c34e3834acd\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.142451 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.142504 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.151395 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.156581 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.163011 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.167108 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.168581 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.175326 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs" (OuterVolumeSpecName: "kube-api-access-q4gcs") pod "9bede3eb-f7c3-40df-84a6-2c34e3834acd" (UID: "9bede3eb-f7c3-40df-84a6-2c34e3834acd"). InnerVolumeSpecName "kube-api-access-q4gcs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.225917 4788 generic.go:334] "Generic (PLEG): container finished" podID="58eec917-a769-40a8-b458-6c7b189dcb19" containerID="7b76830ffbc3d4e95d517e58be7fa2c57fa5c19e031060bf090f12ab48f90054" exitCode=0 Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.226011 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerDied","Data":"7b76830ffbc3d4e95d517e58be7fa2c57fa5c19e031060bf090f12ab48f90054"} Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.228715 4788 generic.go:334] "Generic (PLEG): container finished" podID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" containerID="b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581" exitCode=0 Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.228857 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.228967 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" event={"ID":"9bede3eb-f7c3-40df-84a6-2c34e3834acd","Type":"ContainerDied","Data":"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581"} Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.229115 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ll24x" event={"ID":"9bede3eb-f7c3-40df-84a6-2c34e3834acd","Type":"ContainerDied","Data":"8740f60773bcbc6fee4ec948cefe0f16e1d6b38a617c6ce65a64bbd0ae690a6b"} Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.229152 4788 scope.go:117] "RemoveContainer" containerID="b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.233212 4788 generic.go:334] "Generic (PLEG): container finished" podID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerID="4954871b104e079f648869914bea58e85ea568990610aab03fb0ca670317321a" exitCode=0 Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.233320 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerDied","Data":"4954871b104e079f648869914bea58e85ea568990610aab03fb0ca670317321a"} Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244192 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4gcs\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-kube-api-access-q4gcs\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244311 4788 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244330 4788 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bede3eb-f7c3-40df-84a6-2c34e3834acd-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244343 4788 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/9bede3eb-f7c3-40df-84a6-2c34e3834acd-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244355 4788 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bede3eb-f7c3-40df-84a6-2c34e3834acd-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244366 4788 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.244377 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bede3eb-f7c3-40df-84a6-2c34e3834acd-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.265435 4788 scope.go:117] "RemoveContainer" containerID="b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581" Dec 11 09:25:44 crc kubenswrapper[4788]: E1211 09:25:44.266416 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581\": container with ID starting with b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581 not found: ID does not exist" containerID="b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.266458 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581"} err="failed to get container status \"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581\": rpc error: code = NotFound desc = could not find container \"b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581\": container with ID starting with b7bf2a16ad86f1362007933691db4cc358d3f5f5781e9cfb13fd81df82d49581 not found: ID does not exist" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.267906 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.274622 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ll24x"] Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.349578 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.446708 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities\") pod \"58eec917-a769-40a8-b458-6c7b189dcb19\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.446782 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvpcj\" (UniqueName: \"kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj\") pod \"58eec917-a769-40a8-b458-6c7b189dcb19\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.446827 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content\") pod \"58eec917-a769-40a8-b458-6c7b189dcb19\" (UID: \"58eec917-a769-40a8-b458-6c7b189dcb19\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.451334 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities" (OuterVolumeSpecName: "utilities") pod "58eec917-a769-40a8-b458-6c7b189dcb19" (UID: "58eec917-a769-40a8-b458-6c7b189dcb19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.457892 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj" (OuterVolumeSpecName: "kube-api-access-cvpcj") pod "58eec917-a769-40a8-b458-6c7b189dcb19" (UID: "58eec917-a769-40a8-b458-6c7b189dcb19"). InnerVolumeSpecName "kube-api-access-cvpcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.503036 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" path="/var/lib/kubelet/pods/9bede3eb-f7c3-40df-84a6-2c34e3834acd/volumes" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.548381 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.548421 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvpcj\" (UniqueName: \"kubernetes.io/projected/58eec917-a769-40a8-b458-6c7b189dcb19-kube-api-access-cvpcj\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.571728 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58eec917-a769-40a8-b458-6c7b189dcb19" (UID: "58eec917-a769-40a8-b458-6c7b189dcb19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.613710 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.649322 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities\") pod \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.649421 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dh8l8\" (UniqueName: \"kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8\") pod \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.649453 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content\") pod \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\" (UID: \"1cc339d9-17c7-4461-bc56-a6c12f422aa9\") " Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.649670 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eec917-a769-40a8-b458-6c7b189dcb19-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.650690 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities" (OuterVolumeSpecName: "utilities") pod "1cc339d9-17c7-4461-bc56-a6c12f422aa9" (UID: "1cc339d9-17c7-4461-bc56-a6c12f422aa9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.654475 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8" (OuterVolumeSpecName: "kube-api-access-dh8l8") pod "1cc339d9-17c7-4461-bc56-a6c12f422aa9" (UID: "1cc339d9-17c7-4461-bc56-a6c12f422aa9"). InnerVolumeSpecName "kube-api-access-dh8l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.671627 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1cc339d9-17c7-4461-bc56-a6c12f422aa9" (UID: "1cc339d9-17c7-4461-bc56-a6c12f422aa9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.750908 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.750986 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dh8l8\" (UniqueName: \"kubernetes.io/projected/1cc339d9-17c7-4461-bc56-a6c12f422aa9-kube-api-access-dh8l8\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:44 crc kubenswrapper[4788]: I1211 09:25:44.750999 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cc339d9-17c7-4461-bc56-a6c12f422aa9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.245854 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdf99" event={"ID":"1cc339d9-17c7-4461-bc56-a6c12f422aa9","Type":"ContainerDied","Data":"cf3b0d2ef8643957ff567fb2bf38d193e36f83eebff7af06d20a82d0ec758592"} Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.246545 4788 scope.go:117] "RemoveContainer" containerID="4954871b104e079f648869914bea58e85ea568990610aab03fb0ca670317321a" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.246780 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdf99" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.253372 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zfxb" event={"ID":"58eec917-a769-40a8-b458-6c7b189dcb19","Type":"ContainerDied","Data":"e2ed8e88b3db183136c0fa5d0315262d5a33f6ae35b348ebd2478cfa1bee785c"} Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.253600 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6zfxb" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.277600 4788 scope.go:117] "RemoveContainer" containerID="97be21c8ae1f82fa649a398333f9a9e5c8c137b27e49946b598cd82fe4ecb2e5" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.293257 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.297133 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6zfxb"] Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.307952 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.311886 4788 scope.go:117] "RemoveContainer" containerID="d6afd0068dfa2c1354939d6b2b0ccbde32056e0edde1eb02d2a9b15d21503ba1" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.311898 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdf99"] Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.329685 4788 scope.go:117] "RemoveContainer" containerID="7b76830ffbc3d4e95d517e58be7fa2c57fa5c19e031060bf090f12ab48f90054" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.345925 4788 scope.go:117] "RemoveContainer" containerID="d02b7bace31913625512f12ba4ad260ace16bae9cbc315c1478f2c17a69e1403" Dec 11 09:25:45 crc kubenswrapper[4788]: I1211 09:25:45.364456 4788 scope.go:117] "RemoveContainer" containerID="1e8fb418bb37cda0de9a1f299bc70015cfb888c484c2ed8be1796aeb86a4a992" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.081305 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.081678 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c9gz6" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="registry-server" containerID="cri-o://76f88d0006c2be8df672ce76cb15150ac69764891b00cb55ef0e882542e8a85e" gracePeriod=2 Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.264106 4788 generic.go:334] "Generic (PLEG): container finished" podID="38884c7e-079f-4133-99b7-a8232008072d" containerID="76f88d0006c2be8df672ce76cb15150ac69764891b00cb55ef0e882542e8a85e" exitCode=0 Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.264521 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerDied","Data":"76f88d0006c2be8df672ce76cb15150ac69764891b00cb55ef0e882542e8a85e"} Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.475857 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.505499 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" path="/var/lib/kubelet/pods/1cc339d9-17c7-4461-bc56-a6c12f422aa9/volumes" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.506337 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" path="/var/lib/kubelet/pods/58eec917-a769-40a8-b458-6c7b189dcb19/volumes" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.573888 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities\") pod \"38884c7e-079f-4133-99b7-a8232008072d\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.573969 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content\") pod \"38884c7e-079f-4133-99b7-a8232008072d\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.573997 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv6x4\" (UniqueName: \"kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4\") pod \"38884c7e-079f-4133-99b7-a8232008072d\" (UID: \"38884c7e-079f-4133-99b7-a8232008072d\") " Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.575039 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities" (OuterVolumeSpecName: "utilities") pod "38884c7e-079f-4133-99b7-a8232008072d" (UID: "38884c7e-079f-4133-99b7-a8232008072d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.580995 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4" (OuterVolumeSpecName: "kube-api-access-lv6x4") pod "38884c7e-079f-4133-99b7-a8232008072d" (UID: "38884c7e-079f-4133-99b7-a8232008072d"). InnerVolumeSpecName "kube-api-access-lv6x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.631157 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38884c7e-079f-4133-99b7-a8232008072d" (UID: "38884c7e-079f-4133-99b7-a8232008072d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.675618 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.675688 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38884c7e-079f-4133-99b7-a8232008072d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:46 crc kubenswrapper[4788]: I1211 09:25:46.675710 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv6x4\" (UniqueName: \"kubernetes.io/projected/38884c7e-079f-4133-99b7-a8232008072d-kube-api-access-lv6x4\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.274092 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9gz6" event={"ID":"38884c7e-079f-4133-99b7-a8232008072d","Type":"ContainerDied","Data":"a567c53648d4c79a67ebfa9cef8423a730e52305763b781365cbc5303184856f"} Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.274173 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9gz6" Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.274597 4788 scope.go:117] "RemoveContainer" containerID="76f88d0006c2be8df672ce76cb15150ac69764891b00cb55ef0e882542e8a85e" Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.304041 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.309837 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c9gz6"] Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.310924 4788 scope.go:117] "RemoveContainer" containerID="1e8f76d5cd5adf61de6a2ba8c38a0b3e4f95c3190701263bc7d73b1346b950dc" Dec 11 09:25:47 crc kubenswrapper[4788]: I1211 09:25:47.339109 4788 scope.go:117] "RemoveContainer" containerID="25ed7d97cef742272a9ba95e4e388bfae4ad067be1512a3f476313c4288ff84b" Dec 11 09:25:48 crc kubenswrapper[4788]: I1211 09:25:48.505037 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38884c7e-079f-4133-99b7-a8232008072d" path="/var/lib/kubelet/pods/38884c7e-079f-4133-99b7-a8232008072d/volumes" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.836640 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.838011 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9q655" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="registry-server" containerID="cri-o://c8983eecf82b198da20f3f0de59738ba5ec534ead82e96aec0e7464a6263e5b5" gracePeriod=30 Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.844855 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.845437 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wz9mc" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" 
containerID="cri-o://25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" gracePeriod=30 Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.858736 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.859011 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" containerID="cri-o://75adb216915fc7f75d0837fe2d6bc04241d91580d8b03e49409cd3eb0d77d120" gracePeriod=30 Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.869623 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.869972 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4cclk" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="registry-server" containerID="cri-o://b27b67a371c9f92bed329674864dbddd3f13829d954e77f8e17a938d1667a3d3" gracePeriod=30 Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.874434 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.874700 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j9fxc" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="registry-server" containerID="cri-o://5b9661be0df648975704fa9807f79d8bc754a613ac99bc516de352fbc3ed1413" gracePeriod=30 Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.878212 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qdm6m"] Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879029 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="extract-content" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879047 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="extract-content" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879060 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879066 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879074 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="extract-content" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879080 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="extract-content" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879089 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="extract-content" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879096 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="extract-content" Dec 11 09:25:50 crc 
kubenswrapper[4788]: E1211 09:25:50.879103 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879108 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879116 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879122 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879131 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" containerName="registry" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879137 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" containerName="registry" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879142 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879148 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879157 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879164 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: E1211 09:25:50.879172 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879180 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="extract-utilities" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879293 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="58eec917-a769-40a8-b458-6c7b189dcb19" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879303 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cc339d9-17c7-4461-bc56-a6c12f422aa9" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879316 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bede3eb-f7c3-40df-84a6-2c34e3834acd" containerName="registry" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879326 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="38884c7e-079f-4133-99b7-a8232008072d" containerName="registry-server" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.879769 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.891659 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qdm6m"] Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.937764 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.937825 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:50 crc kubenswrapper[4788]: I1211 09:25:50.937891 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p87mk\" (UniqueName: \"kubernetes.io/projected/3406c148-fa4c-403c-bf11-02f53cf14170-kube-api-access-p87mk\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.038766 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p87mk\" (UniqueName: \"kubernetes.io/projected/3406c148-fa4c-403c-bf11-02f53cf14170-kube-api-access-p87mk\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.038931 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.039003 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.041046 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.047110 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/3406c148-fa4c-403c-bf11-02f53cf14170-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.061855 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p87mk\" (UniqueName: \"kubernetes.io/projected/3406c148-fa4c-403c-bf11-02f53cf14170-kube-api-access-p87mk\") pod \"marketplace-operator-79b997595-qdm6m\" (UID: \"3406c148-fa4c-403c-bf11-02f53cf14170\") " pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:51 crc kubenswrapper[4788]: I1211 09:25:51.203867 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:51.614831 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qdm6m"] Dec 11 09:25:52 crc kubenswrapper[4788]: W1211 09:25:51.722687 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3406c148_fa4c_403c_bf11_02f53cf14170.slice/crio-fe4bcdd73fe89145bebc987bea4ebb5012ec401746512765761a432c3b9a1121 WatchSource:0}: Error finding container fe4bcdd73fe89145bebc987bea4ebb5012ec401746512765761a432c3b9a1121: Status 404 returned error can't find the container with id fe4bcdd73fe89145bebc987bea4ebb5012ec401746512765761a432c3b9a1121 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.330906 4788 generic.go:334] "Generic (PLEG): container finished" podID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerID="75adb216915fc7f75d0837fe2d6bc04241d91580d8b03e49409cd3eb0d77d120" exitCode=0 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.330989 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" event={"ID":"926d4b75-3809-4fce-89e7-8076befa3b1b","Type":"ContainerDied","Data":"75adb216915fc7f75d0837fe2d6bc04241d91580d8b03e49409cd3eb0d77d120"} Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.333479 4788 generic.go:334] "Generic (PLEG): container finished" podID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerID="25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" exitCode=0 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.333583 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerDied","Data":"25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051"} Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.335869 4788 generic.go:334] "Generic (PLEG): container finished" podID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerID="5b9661be0df648975704fa9807f79d8bc754a613ac99bc516de352fbc3ed1413" exitCode=0 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.335967 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerDied","Data":"5b9661be0df648975704fa9807f79d8bc754a613ac99bc516de352fbc3ed1413"} Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.338545 4788 generic.go:334] "Generic (PLEG): container finished" podID="efaaaaf2-7778-46d4-9400-7c31a2f82765" 
containerID="b27b67a371c9f92bed329674864dbddd3f13829d954e77f8e17a938d1667a3d3" exitCode=0 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.338605 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerDied","Data":"b27b67a371c9f92bed329674864dbddd3f13829d954e77f8e17a938d1667a3d3"} Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.339871 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" event={"ID":"3406c148-fa4c-403c-bf11-02f53cf14170","Type":"ContainerStarted","Data":"fe4bcdd73fe89145bebc987bea4ebb5012ec401746512765761a432c3b9a1121"} Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.341889 4788 generic.go:334] "Generic (PLEG): container finished" podID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerID="c8983eecf82b198da20f3f0de59738ba5ec534ead82e96aec0e7464a6263e5b5" exitCode=0 Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.341990 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerDied","Data":"c8983eecf82b198da20f3f0de59738ba5ec534ead82e96aec0e7464a6263e5b5"} Dec 11 09:25:52 crc kubenswrapper[4788]: E1211 09:25:52.506254 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051 is running failed: container process not found" containerID="25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 09:25:52 crc kubenswrapper[4788]: E1211 09:25:52.507479 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051 is running failed: container process not found" containerID="25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 09:25:52 crc kubenswrapper[4788]: E1211 09:25:52.507963 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051 is running failed: container process not found" containerID="25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 09:25:52 crc kubenswrapper[4788]: E1211 09:25:52.508016 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-wz9mc" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.704153 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.804065 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.818296 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.822855 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.834283 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.878897 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content\") pod \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.878993 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities\") pod \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.879062 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnvv8\" (UniqueName: \"kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8\") pod \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\" (UID: \"23f0f07c-d5d8-4a8c-8546-77e15ef979f5\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.880318 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities" (OuterVolumeSpecName: "utilities") pod "23f0f07c-d5d8-4a8c-8546-77e15ef979f5" (UID: "23f0f07c-d5d8-4a8c-8546-77e15ef979f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.886984 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8" (OuterVolumeSpecName: "kube-api-access-lnvv8") pod "23f0f07c-d5d8-4a8c-8546-77e15ef979f5" (UID: "23f0f07c-d5d8-4a8c-8546-77e15ef979f5"). InnerVolumeSpecName "kube-api-access-lnvv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.963172 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23f0f07c-d5d8-4a8c-8546-77e15ef979f5" (UID: "23f0f07c-d5d8-4a8c-8546-77e15ef979f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982614 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content\") pod \"efaaaaf2-7778-46d4-9400-7c31a2f82765\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982712 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m66d7\" (UniqueName: \"kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7\") pod \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982747 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca\") pod \"926d4b75-3809-4fce-89e7-8076befa3b1b\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982855 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content\") pod \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982921 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xqgj\" (UniqueName: \"kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj\") pod \"a35c01a8-01f0-48f9-a529-33ccc58161c9\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.982972 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content\") pod \"a35c01a8-01f0-48f9-a529-33ccc58161c9\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983000 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities\") pod \"efaaaaf2-7778-46d4-9400-7c31a2f82765\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983034 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmg22\" (UniqueName: \"kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22\") pod \"926d4b75-3809-4fce-89e7-8076befa3b1b\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983066 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities\") pod \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\" (UID: \"8e697aeb-5f1a-45f0-9c6e-5b65e638342c\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983099 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfc4n\" (UniqueName: 
\"kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n\") pod \"efaaaaf2-7778-46d4-9400-7c31a2f82765\" (UID: \"efaaaaf2-7778-46d4-9400-7c31a2f82765\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983133 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities\") pod \"a35c01a8-01f0-48f9-a529-33ccc58161c9\" (UID: \"a35c01a8-01f0-48f9-a529-33ccc58161c9\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983833 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities" (OuterVolumeSpecName: "utilities") pod "efaaaaf2-7778-46d4-9400-7c31a2f82765" (UID: "efaaaaf2-7778-46d4-9400-7c31a2f82765"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.983867 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "926d4b75-3809-4fce-89e7-8076befa3b1b" (UID: "926d4b75-3809-4fce-89e7-8076befa3b1b"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984079 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics\") pod \"926d4b75-3809-4fce-89e7-8076befa3b1b\" (UID: \"926d4b75-3809-4fce-89e7-8076befa3b1b\") " Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984560 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984589 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984606 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984620 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnvv8\" (UniqueName: \"kubernetes.io/projected/23f0f07c-d5d8-4a8c-8546-77e15ef979f5-kube-api-access-lnvv8\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984633 4788 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984852 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities" (OuterVolumeSpecName: "utilities") pod "a35c01a8-01f0-48f9-a529-33ccc58161c9" (UID: "a35c01a8-01f0-48f9-a529-33ccc58161c9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.984910 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities" (OuterVolumeSpecName: "utilities") pod "8e697aeb-5f1a-45f0-9c6e-5b65e638342c" (UID: "8e697aeb-5f1a-45f0-9c6e-5b65e638342c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.987711 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7" (OuterVolumeSpecName: "kube-api-access-m66d7") pod "8e697aeb-5f1a-45f0-9c6e-5b65e638342c" (UID: "8e697aeb-5f1a-45f0-9c6e-5b65e638342c"). InnerVolumeSpecName "kube-api-access-m66d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.987853 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj" (OuterVolumeSpecName: "kube-api-access-8xqgj") pod "a35c01a8-01f0-48f9-a529-33ccc58161c9" (UID: "a35c01a8-01f0-48f9-a529-33ccc58161c9"). InnerVolumeSpecName "kube-api-access-8xqgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.989433 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "926d4b75-3809-4fce-89e7-8076befa3b1b" (UID: "926d4b75-3809-4fce-89e7-8076befa3b1b"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:25:52 crc kubenswrapper[4788]: I1211 09:25:52.989499 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22" (OuterVolumeSpecName: "kube-api-access-mmg22") pod "926d4b75-3809-4fce-89e7-8076befa3b1b" (UID: "926d4b75-3809-4fce-89e7-8076befa3b1b"). InnerVolumeSpecName "kube-api-access-mmg22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.005959 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n" (OuterVolumeSpecName: "kube-api-access-xfc4n") pod "efaaaaf2-7778-46d4-9400-7c31a2f82765" (UID: "efaaaaf2-7778-46d4-9400-7c31a2f82765"). InnerVolumeSpecName "kube-api-access-xfc4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.036403 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efaaaaf2-7778-46d4-9400-7c31a2f82765" (UID: "efaaaaf2-7778-46d4-9400-7c31a2f82765"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.049725 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a35c01a8-01f0-48f9-a529-33ccc58161c9" (UID: "a35c01a8-01f0-48f9-a529-33ccc58161c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086407 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086473 4788 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/926d4b75-3809-4fce-89e7-8076befa3b1b-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086499 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efaaaaf2-7778-46d4-9400-7c31a2f82765-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086516 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m66d7\" (UniqueName: \"kubernetes.io/projected/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-kube-api-access-m66d7\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086535 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xqgj\" (UniqueName: \"kubernetes.io/projected/a35c01a8-01f0-48f9-a529-33ccc58161c9-kube-api-access-8xqgj\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086550 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35c01a8-01f0-48f9-a529-33ccc58161c9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086564 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmg22\" (UniqueName: \"kubernetes.io/projected/926d4b75-3809-4fce-89e7-8076befa3b1b-kube-api-access-mmg22\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086578 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.086591 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfc4n\" (UniqueName: \"kubernetes.io/projected/efaaaaf2-7778-46d4-9400-7c31a2f82765-kube-api-access-xfc4n\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.124196 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e697aeb-5f1a-45f0-9c6e-5b65e638342c" (UID: "8e697aeb-5f1a-45f0-9c6e-5b65e638342c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.188059 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e697aeb-5f1a-45f0-9c6e-5b65e638342c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.348701 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" event={"ID":"926d4b75-3809-4fce-89e7-8076befa3b1b","Type":"ContainerDied","Data":"a7f71d240bb161218a4267d5f897506fcf515754b58a96bc1ac0d2d16a4b5470"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.348746 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nmcnl" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.348804 4788 scope.go:117] "RemoveContainer" containerID="75adb216915fc7f75d0837fe2d6bc04241d91580d8b03e49409cd3eb0d77d120" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.350920 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz9mc" event={"ID":"23f0f07c-d5d8-4a8c-8546-77e15ef979f5","Type":"ContainerDied","Data":"9cf8def9be9dd85a7f840953308471efdcda89482b72fa1df655bd3039e9c9e3"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.351007 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz9mc" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.354164 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9fxc" event={"ID":"8e697aeb-5f1a-45f0-9c6e-5b65e638342c","Type":"ContainerDied","Data":"fd073c117cc021ec37b96350d1b4421533cb41502ef6b19e79f076e7ee07afd6"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.354268 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j9fxc" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.357450 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4cclk" event={"ID":"efaaaaf2-7778-46d4-9400-7c31a2f82765","Type":"ContainerDied","Data":"5d3aa19a3bdf93e8f0f1f3ad1f4303730f82db6225750423bc95187d27a57ef0"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.357661 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4cclk" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.366341 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9q655" event={"ID":"a35c01a8-01f0-48f9-a529-33ccc58161c9","Type":"ContainerDied","Data":"a2c48857238e7ec2c40287025afe20570c51109df3b04d97e88e0f25122944cd"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.366499 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9q655" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.367152 4788 scope.go:117] "RemoveContainer" containerID="25779b9d2ee0ab58b114b79a5ffb0d8b6b5f05fc0cb34d3be4507d6d960a4051" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.368370 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" event={"ID":"3406c148-fa4c-403c-bf11-02f53cf14170","Type":"ContainerStarted","Data":"082a75e6feb5305ff328fcb9a98cbb959b3ab9b0d8e838ab8ebf5999ce1a18f8"} Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.368752 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.374263 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.404608 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qdm6m" podStartSLOduration=3.404585871 podStartE2EDuration="3.404585871s" podCreationTimestamp="2025-12-11 09:25:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:25:53.399436763 +0000 UTC m=+283.470216369" watchObservedRunningTime="2025-12-11 09:25:53.404585871 +0000 UTC m=+283.475365457" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.431856 4788 scope.go:117] "RemoveContainer" containerID="4c170b741a1a8948ab6156accbddbdd747061a968e119e31883aa668384f7afe" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.433286 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.437639 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nmcnl"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.444275 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.473087 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wz9mc"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.474352 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.478475 4788 scope.go:117] "RemoveContainer" containerID="33459cdbb3aeb0d58e5dec01f55ac4d9b6e54d39de1b3c3b6bb3c109c2c7cf40" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.479555 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9q655"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.482881 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.485915 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j9fxc"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.489657 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:25:53 crc kubenswrapper[4788]: 
I1211 09:25:53.493281 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4cclk"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.497134 4788 scope.go:117] "RemoveContainer" containerID="5b9661be0df648975704fa9807f79d8bc754a613ac99bc516de352fbc3ed1413" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.515740 4788 scope.go:117] "RemoveContainer" containerID="c98ad4544a4600f178c3914fb99681ff6ae5b3d641ffee4b6cfbabe422bee64e" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.540052 4788 scope.go:117] "RemoveContainer" containerID="9380ab1899f9c190a6b634832a5ce6b1bfcfec5f0a4809acae8f9ae54d438bff" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.560088 4788 scope.go:117] "RemoveContainer" containerID="b27b67a371c9f92bed329674864dbddd3f13829d954e77f8e17a938d1667a3d3" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.578334 4788 scope.go:117] "RemoveContainer" containerID="9c4d1ac02cdd0e2e7cc6d823ac590704d93f3ac0a3d7195ffc3840a8c52f041b" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.595427 4788 scope.go:117] "RemoveContainer" containerID="10cdf08fc160444684e3a3970bf76c221775628c6a9cde86eb996723ca2ead7a" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.618922 4788 scope.go:117] "RemoveContainer" containerID="c8983eecf82b198da20f3f0de59738ba5ec534ead82e96aec0e7464a6263e5b5" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.635861 4788 scope.go:117] "RemoveContainer" containerID="460c132537fbab015f5d1d59f90a9674778bc3d82011ceff7d97ac2e147d3cef" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.655311 4788 scope.go:117] "RemoveContainer" containerID="51bb90b9cc045c0f904731c7f899b13a86956a65b6313888546d4b8af06db734" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.892659 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893014 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893035 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893052 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893060 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893072 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893083 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893095 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893103 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: 
E1211 09:25:53.893115 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893124 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893140 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893150 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893162 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893171 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893184 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893190 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893202 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893209 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893218 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893243 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893252 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893259 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="extract-content" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893268 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893275 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: E1211 09:25:53.893286 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="extract-utilities" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893293 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="extract-utilities" Dec 11 
09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893417 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893434 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" containerName="marketplace-operator" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893445 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893454 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.893465 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" containerName="registry-server" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.900517 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.908868 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.908883 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.999027 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.999081 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndgdh\" (UniqueName: \"kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:53 crc kubenswrapper[4788]: I1211 09:25:53.999131 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.100264 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.100400 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " 
pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.100431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndgdh\" (UniqueName: \"kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.100852 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.100865 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.121569 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndgdh\" (UniqueName: \"kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh\") pod \"certified-operators-dx56s\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.219016 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.429807 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:25:54 crc kubenswrapper[4788]: W1211 09:25:54.438258 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c91a81d_da68_4da8_9081_38be3d8f3213.slice/crio-6945395b276c8dea07782c25b50f1b890e983bfdeb3fa9274da77700b9c5a4bd WatchSource:0}: Error finding container 6945395b276c8dea07782c25b50f1b890e983bfdeb3fa9274da77700b9c5a4bd: Status 404 returned error can't find the container with id 6945395b276c8dea07782c25b50f1b890e983bfdeb3fa9274da77700b9c5a4bd Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.504930 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f0f07c-d5d8-4a8c-8546-77e15ef979f5" path="/var/lib/kubelet/pods/23f0f07c-d5d8-4a8c-8546-77e15ef979f5/volumes" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.505664 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e697aeb-5f1a-45f0-9c6e-5b65e638342c" path="/var/lib/kubelet/pods/8e697aeb-5f1a-45f0-9c6e-5b65e638342c/volumes" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.506547 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="926d4b75-3809-4fce-89e7-8076befa3b1b" path="/var/lib/kubelet/pods/926d4b75-3809-4fce-89e7-8076befa3b1b/volumes" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.507509 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a35c01a8-01f0-48f9-a529-33ccc58161c9" path="/var/lib/kubelet/pods/a35c01a8-01f0-48f9-a529-33ccc58161c9/volumes" Dec 11 09:25:54 crc 
kubenswrapper[4788]: I1211 09:25:54.508085 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efaaaaf2-7778-46d4-9400-7c31a2f82765" path="/var/lib/kubelet/pods/efaaaaf2-7778-46d4-9400-7c31a2f82765/volumes" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.889555 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2ts2m"] Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.891128 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.895489 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 09:25:54 crc kubenswrapper[4788]: I1211 09:25:54.904740 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ts2m"] Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.013839 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-catalog-content\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.013932 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swx6h\" (UniqueName: \"kubernetes.io/projected/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-kube-api-access-swx6h\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.013974 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-utilities\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.115695 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swx6h\" (UniqueName: \"kubernetes.io/projected/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-kube-api-access-swx6h\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.115753 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-utilities\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.115827 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-catalog-content\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.116193 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-utilities\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.116281 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-catalog-content\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.140992 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swx6h\" (UniqueName: \"kubernetes.io/projected/91bc93e0-4d8b-4b80-a02b-527c1c6e57f3-kube-api-access-swx6h\") pod \"redhat-operators-2ts2m\" (UID: \"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3\") " pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.213648 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.397024 4788 generic.go:334] "Generic (PLEG): container finished" podID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerID="f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7" exitCode=0 Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.397109 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerDied","Data":"f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7"} Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.397664 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerStarted","Data":"6945395b276c8dea07782c25b50f1b890e983bfdeb3fa9274da77700b9c5a4bd"} Dec 11 09:25:55 crc kubenswrapper[4788]: I1211 09:25:55.662539 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ts2m"] Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.290942 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.292509 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.296960 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.303547 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.404888 4788 generic.go:334] "Generic (PLEG): container finished" podID="91bc93e0-4d8b-4b80-a02b-527c1c6e57f3" containerID="d3f09de596b10615fdbd4afe7114bee3f1a82f135e054f589ce70b3b528d5c98" exitCode=0 Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.404946 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ts2m" event={"ID":"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3","Type":"ContainerDied","Data":"d3f09de596b10615fdbd4afe7114bee3f1a82f135e054f589ce70b3b528d5c98"} Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.404984 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ts2m" event={"ID":"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3","Type":"ContainerStarted","Data":"d2227d46c16579c606e6afe14fcde16f524d88919bb077571da0f0d4c605eccf"} Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.434382 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.434506 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.434533 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv5j9\" (UniqueName: \"kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.535273 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.535311 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv5j9\" (UniqueName: \"kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.535364 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.535843 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.536354 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.559505 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv5j9\" (UniqueName: \"kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9\") pod \"community-operators-h9g2p\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:56 crc kubenswrapper[4788]: I1211 09:25:56.617932 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.028075 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 09:25:57 crc kubenswrapper[4788]: W1211 09:25:57.032433 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4f094f5_cc0c_4f3f_81af_36f4887fae9c.slice/crio-da5d0f059d5368a027fbda7aec32251273290695f3561537229a666095176205 WatchSource:0}: Error finding container da5d0f059d5368a027fbda7aec32251273290695f3561537229a666095176205: Status 404 returned error can't find the container with id da5d0f059d5368a027fbda7aec32251273290695f3561537229a666095176205 Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.290465 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wfsbn"] Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.291853 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.293830 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.311116 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wfsbn"] Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.413263 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerStarted","Data":"da5d0f059d5368a027fbda7aec32251273290695f3561537229a666095176205"} Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.445862 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-catalog-content\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.447972 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-utilities\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.448092 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97lf8\" (UniqueName: \"kubernetes.io/projected/83b2aa4d-369a-413f-a676-46dcb0957ffa-kube-api-access-97lf8\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.548749 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-catalog-content\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.548818 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-utilities\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.548855 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97lf8\" (UniqueName: \"kubernetes.io/projected/83b2aa4d-369a-413f-a676-46dcb0957ffa-kube-api-access-97lf8\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.549509 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-catalog-content\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " 
pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.549594 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b2aa4d-369a-413f-a676-46dcb0957ffa-utilities\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.570529 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97lf8\" (UniqueName: \"kubernetes.io/projected/83b2aa4d-369a-413f-a676-46dcb0957ffa-kube-api-access-97lf8\") pod \"redhat-marketplace-wfsbn\" (UID: \"83b2aa4d-369a-413f-a676-46dcb0957ffa\") " pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.616392 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:25:57 crc kubenswrapper[4788]: I1211 09:25:57.831089 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wfsbn"] Dec 11 09:25:57 crc kubenswrapper[4788]: W1211 09:25:57.846675 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83b2aa4d_369a_413f_a676_46dcb0957ffa.slice/crio-d38e38a6cb12ebae1f449783f3d0873199747b934a12d1f244d4fa8dce1c70ec WatchSource:0}: Error finding container d38e38a6cb12ebae1f449783f3d0873199747b934a12d1f244d4fa8dce1c70ec: Status 404 returned error can't find the container with id d38e38a6cb12ebae1f449783f3d0873199747b934a12d1f244d4fa8dce1c70ec Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.420944 4788 generic.go:334] "Generic (PLEG): container finished" podID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerID="b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b" exitCode=0 Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.421038 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerDied","Data":"b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b"} Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.425564 4788 generic.go:334] "Generic (PLEG): container finished" podID="83b2aa4d-369a-413f-a676-46dcb0957ffa" containerID="0bda99056d12edf6ea9ba6526d8e367a5b0d05a17d3184b314bdd0e0f2b9d82f" exitCode=0 Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.425665 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wfsbn" event={"ID":"83b2aa4d-369a-413f-a676-46dcb0957ffa","Type":"ContainerDied","Data":"0bda99056d12edf6ea9ba6526d8e367a5b0d05a17d3184b314bdd0e0f2b9d82f"} Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.425840 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wfsbn" event={"ID":"83b2aa4d-369a-413f-a676-46dcb0957ffa","Type":"ContainerStarted","Data":"d38e38a6cb12ebae1f449783f3d0873199747b934a12d1f244d4fa8dce1c70ec"} Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.427869 4788 generic.go:334] "Generic (PLEG): container finished" podID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerID="725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b" exitCode=0 Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.427930 4788 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerDied","Data":"725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b"} Dec 11 09:25:58 crc kubenswrapper[4788]: I1211 09:25:58.431651 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ts2m" event={"ID":"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3","Type":"ContainerStarted","Data":"a15378521de863ffbfbdac4a00769b11f6f1ba037239664305df4e71fd990201"} Dec 11 09:25:59 crc kubenswrapper[4788]: I1211 09:25:59.441607 4788 generic.go:334] "Generic (PLEG): container finished" podID="91bc93e0-4d8b-4b80-a02b-527c1c6e57f3" containerID="a15378521de863ffbfbdac4a00769b11f6f1ba037239664305df4e71fd990201" exitCode=0 Dec 11 09:25:59 crc kubenswrapper[4788]: I1211 09:25:59.441738 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ts2m" event={"ID":"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3","Type":"ContainerDied","Data":"a15378521de863ffbfbdac4a00769b11f6f1ba037239664305df4e71fd990201"} Dec 11 09:26:00 crc kubenswrapper[4788]: I1211 09:26:00.450478 4788 generic.go:334] "Generic (PLEG): container finished" podID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerID="373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027" exitCode=0 Dec 11 09:26:00 crc kubenswrapper[4788]: I1211 09:26:00.450527 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerDied","Data":"373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027"} Dec 11 09:26:01 crc kubenswrapper[4788]: I1211 09:26:01.461712 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerStarted","Data":"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2"} Dec 11 09:26:01 crc kubenswrapper[4788]: I1211 09:26:01.489567 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dx56s" podStartSLOduration=3.77483557 podStartE2EDuration="8.489542541s" podCreationTimestamp="2025-12-11 09:25:53 +0000 UTC" firstStartedPulling="2025-12-11 09:25:55.399922665 +0000 UTC m=+285.470702251" lastFinishedPulling="2025-12-11 09:26:00.114629596 +0000 UTC m=+290.185409222" observedRunningTime="2025-12-11 09:26:01.484518635 +0000 UTC m=+291.555298231" watchObservedRunningTime="2025-12-11 09:26:01.489542541 +0000 UTC m=+291.560322127" Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.475041 4788 generic.go:334] "Generic (PLEG): container finished" podID="83b2aa4d-369a-413f-a676-46dcb0957ffa" containerID="56f3a74351bf66a1593720f88e681559dda76ebd5fd83ddae383ed83a1e81696" exitCode=0 Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.475123 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wfsbn" event={"ID":"83b2aa4d-369a-413f-a676-46dcb0957ffa","Type":"ContainerDied","Data":"56f3a74351bf66a1593720f88e681559dda76ebd5fd83ddae383ed83a1e81696"} Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.482104 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ts2m" 
event={"ID":"91bc93e0-4d8b-4b80-a02b-527c1c6e57f3","Type":"ContainerStarted","Data":"e9d634630021970cf21f8c1324894ecc54cf4488f204be4e3cfa3e94cc8a11c4"} Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.487435 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerStarted","Data":"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900"} Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.524197 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h9g2p" podStartSLOduration=4.197984037 podStartE2EDuration="7.524174655s" podCreationTimestamp="2025-12-11 09:25:56 +0000 UTC" firstStartedPulling="2025-12-11 09:25:58.422513873 +0000 UTC m=+288.493293459" lastFinishedPulling="2025-12-11 09:26:01.748704481 +0000 UTC m=+291.819484077" observedRunningTime="2025-12-11 09:26:03.519209151 +0000 UTC m=+293.589988747" watchObservedRunningTime="2025-12-11 09:26:03.524174655 +0000 UTC m=+293.594954251" Dec 11 09:26:03 crc kubenswrapper[4788]: I1211 09:26:03.543559 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2ts2m" podStartSLOduration=4.242038907 podStartE2EDuration="9.543538988s" podCreationTimestamp="2025-12-11 09:25:54 +0000 UTC" firstStartedPulling="2025-12-11 09:25:56.406882363 +0000 UTC m=+286.477661959" lastFinishedPulling="2025-12-11 09:26:01.708382444 +0000 UTC m=+291.779162040" observedRunningTime="2025-12-11 09:26:03.543019365 +0000 UTC m=+293.613798951" watchObservedRunningTime="2025-12-11 09:26:03.543538988 +0000 UTC m=+293.614318574" Dec 11 09:26:04 crc kubenswrapper[4788]: I1211 09:26:04.219372 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:26:04 crc kubenswrapper[4788]: I1211 09:26:04.219462 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:26:04 crc kubenswrapper[4788]: I1211 09:26:04.267207 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:26:05 crc kubenswrapper[4788]: I1211 09:26:05.214549 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:26:05 crc kubenswrapper[4788]: I1211 09:26:05.215111 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:26:06 crc kubenswrapper[4788]: I1211 09:26:06.259996 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2ts2m" podUID="91bc93e0-4d8b-4b80-a02b-527c1c6e57f3" containerName="registry-server" probeResult="failure" output=< Dec 11 09:26:06 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:26:06 crc kubenswrapper[4788]: > Dec 11 09:26:06 crc kubenswrapper[4788]: I1211 09:26:06.618398 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:26:06 crc kubenswrapper[4788]: I1211 09:26:06.618922 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:26:06 crc kubenswrapper[4788]: I1211 09:26:06.659820 4788 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:26:07 crc kubenswrapper[4788]: I1211 09:26:07.519495 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wfsbn" event={"ID":"83b2aa4d-369a-413f-a676-46dcb0957ffa","Type":"ContainerStarted","Data":"44ade062be614dac41d5518bb86228e3b3c9e70f3ebfa4baf649d85af0f82f5a"} Dec 11 09:26:07 crc kubenswrapper[4788]: I1211 09:26:07.546849 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wfsbn" podStartSLOduration=2.5961845329999997 podStartE2EDuration="10.54682536s" podCreationTimestamp="2025-12-11 09:25:57 +0000 UTC" firstStartedPulling="2025-12-11 09:25:58.427075947 +0000 UTC m=+288.497855533" lastFinishedPulling="2025-12-11 09:26:06.377716774 +0000 UTC m=+296.448496360" observedRunningTime="2025-12-11 09:26:07.544857661 +0000 UTC m=+297.615637257" watchObservedRunningTime="2025-12-11 09:26:07.54682536 +0000 UTC m=+297.617604946" Dec 11 09:26:07 crc kubenswrapper[4788]: I1211 09:26:07.572987 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 09:26:07 crc kubenswrapper[4788]: I1211 09:26:07.635639 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:26:07 crc kubenswrapper[4788]: I1211 09:26:07.636647 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:26:08 crc kubenswrapper[4788]: I1211 09:26:08.681138 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-wfsbn" podUID="83b2aa4d-369a-413f-a676-46dcb0957ffa" containerName="registry-server" probeResult="failure" output=< Dec 11 09:26:08 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:26:08 crc kubenswrapper[4788]: > Dec 11 09:26:14 crc kubenswrapper[4788]: I1211 09:26:14.262118 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:26:15 crc kubenswrapper[4788]: I1211 09:26:15.261512 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:26:15 crc kubenswrapper[4788]: I1211 09:26:15.307437 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2ts2m" Dec 11 09:26:17 crc kubenswrapper[4788]: I1211 09:26:17.666074 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:26:17 crc kubenswrapper[4788]: I1211 09:26:17.713392 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wfsbn" Dec 11 09:26:51 crc kubenswrapper[4788]: I1211 09:26:51.369942 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:26:51 crc kubenswrapper[4788]: I1211 09:26:51.370882 4788 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:27:21 crc kubenswrapper[4788]: I1211 09:27:21.370205 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:27:21 crc kubenswrapper[4788]: I1211 09:27:21.371100 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:27:51 crc kubenswrapper[4788]: I1211 09:27:51.369874 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:27:51 crc kubenswrapper[4788]: I1211 09:27:51.370912 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:27:51 crc kubenswrapper[4788]: I1211 09:27:51.370991 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:27:51 crc kubenswrapper[4788]: I1211 09:27:51.371902 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:27:51 crc kubenswrapper[4788]: I1211 09:27:51.371973 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99" gracePeriod=600 Dec 11 09:27:52 crc kubenswrapper[4788]: I1211 09:27:52.168249 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99" exitCode=0 Dec 11 09:27:52 crc kubenswrapper[4788]: I1211 09:27:52.168262 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99"} Dec 11 09:27:52 crc kubenswrapper[4788]: I1211 09:27:52.168956 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11"} Dec 11 09:27:52 crc kubenswrapper[4788]: I1211 09:27:52.168987 4788 scope.go:117] "RemoveContainer" containerID="607373e5c50f16bf23e11020a8b1f6a12dbe8b05ab7cbb3b21540e1d0962cdd3" Dec 11 09:29:51 crc kubenswrapper[4788]: I1211 09:29:51.369942 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:29:51 crc kubenswrapper[4788]: I1211 09:29:51.370859 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.173021 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj"] Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.174917 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.178015 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.180665 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.187248 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj"] Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.224370 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.224466 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.224595 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkx46\" (UniqueName: \"kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.325942 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.326031 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkx46\" (UniqueName: \"kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.326079 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.327434 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.333852 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.349049 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkx46\" (UniqueName: \"kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46\") pod \"collect-profiles-29424090-p2svj\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.498288 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:00 crc kubenswrapper[4788]: I1211 09:30:00.703162 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj"] Dec 11 09:30:01 crc kubenswrapper[4788]: I1211 09:30:01.360316 4788 generic.go:334] "Generic (PLEG): container finished" podID="1f4f5bdd-8029-4aa6-ac75-0160d3250669" containerID="f2b71bfe64f0904d44664492c2564a573ff1b84e56c1bc267e70ae3451537fb4" exitCode=0 Dec 11 09:30:01 crc kubenswrapper[4788]: I1211 09:30:01.360378 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" event={"ID":"1f4f5bdd-8029-4aa6-ac75-0160d3250669","Type":"ContainerDied","Data":"f2b71bfe64f0904d44664492c2564a573ff1b84e56c1bc267e70ae3451537fb4"} Dec 11 09:30:01 crc kubenswrapper[4788]: I1211 09:30:01.360413 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" event={"ID":"1f4f5bdd-8029-4aa6-ac75-0160d3250669","Type":"ContainerStarted","Data":"ce334af18e93f0185da18106d6ccd85c1e76d50d84b4eb7df60a3b23ba34d09e"} Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.592496 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.661565 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume\") pod \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.661631 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkx46\" (UniqueName: \"kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46\") pod \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.661686 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume\") pod \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\" (UID: \"1f4f5bdd-8029-4aa6-ac75-0160d3250669\") " Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.663031 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume" (OuterVolumeSpecName: "config-volume") pod "1f4f5bdd-8029-4aa6-ac75-0160d3250669" (UID: "1f4f5bdd-8029-4aa6-ac75-0160d3250669"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.669586 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1f4f5bdd-8029-4aa6-ac75-0160d3250669" (UID: "1f4f5bdd-8029-4aa6-ac75-0160d3250669"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.670657 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46" (OuterVolumeSpecName: "kube-api-access-xkx46") pod "1f4f5bdd-8029-4aa6-ac75-0160d3250669" (UID: "1f4f5bdd-8029-4aa6-ac75-0160d3250669"). InnerVolumeSpecName "kube-api-access-xkx46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.763738 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f4f5bdd-8029-4aa6-ac75-0160d3250669-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.763805 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkx46\" (UniqueName: \"kubernetes.io/projected/1f4f5bdd-8029-4aa6-ac75-0160d3250669-kube-api-access-xkx46\") on node \"crc\" DevicePath \"\"" Dec 11 09:30:02 crc kubenswrapper[4788]: I1211 09:30:02.763827 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f4f5bdd-8029-4aa6-ac75-0160d3250669-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:30:03 crc kubenswrapper[4788]: I1211 09:30:03.375886 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" event={"ID":"1f4f5bdd-8029-4aa6-ac75-0160d3250669","Type":"ContainerDied","Data":"ce334af18e93f0185da18106d6ccd85c1e76d50d84b4eb7df60a3b23ba34d09e"} Dec 11 09:30:03 crc kubenswrapper[4788]: I1211 09:30:03.376389 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce334af18e93f0185da18106d6ccd85c1e76d50d84b4eb7df60a3b23ba34d09e" Dec 11 09:30:03 crc kubenswrapper[4788]: I1211 09:30:03.375947 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj" Dec 11 09:30:21 crc kubenswrapper[4788]: I1211 09:30:21.369618 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:30:21 crc kubenswrapper[4788]: I1211 09:30:21.370488 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.369205 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.370118 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.370187 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.370903 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.370963 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11" gracePeriod=600 Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.702768 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11" exitCode=0 Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.702841 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11"} Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.703097 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" 
event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca"} Dec 11 09:30:51 crc kubenswrapper[4788]: I1211 09:30:51.703122 4788 scope.go:117] "RemoveContainer" containerID="a382cc415c2d2e543d18d9790addd4098843b8970234d0ee00ccb1cff0184b99" Dec 11 09:31:59 crc kubenswrapper[4788]: I1211 09:31:59.814388 4788 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 09:32:51 crc kubenswrapper[4788]: I1211 09:32:51.369801 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:32:51 crc kubenswrapper[4788]: I1211 09:32:51.370949 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:33:21 crc kubenswrapper[4788]: I1211 09:33:21.369425 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:33:21 crc kubenswrapper[4788]: I1211 09:33:21.370513 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:33:51 crc kubenswrapper[4788]: I1211 09:33:51.368858 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:33:51 crc kubenswrapper[4788]: I1211 09:33:51.369918 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:33:51 crc kubenswrapper[4788]: I1211 09:33:51.369986 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:33:51 crc kubenswrapper[4788]: I1211 09:33:51.370719 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:33:51 crc kubenswrapper[4788]: I1211 09:33:51.370804 4788 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca" gracePeriod=600 Dec 11 09:33:52 crc kubenswrapper[4788]: I1211 09:33:52.372086 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca" exitCode=0 Dec 11 09:33:52 crc kubenswrapper[4788]: I1211 09:33:52.372174 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca"} Dec 11 09:33:52 crc kubenswrapper[4788]: I1211 09:33:52.373035 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43"} Dec 11 09:33:52 crc kubenswrapper[4788]: I1211 09:33:52.373063 4788 scope.go:117] "RemoveContainer" containerID="c865389739d77cff25d3adc32729ba73ea13b37dc8e60f751c943a8b1a240c11" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.076309 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-t6sfj"] Dec 11 09:34:03 crc kubenswrapper[4788]: E1211 09:34:03.077499 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4f5bdd-8029-4aa6-ac75-0160d3250669" containerName="collect-profiles" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.077516 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4f5bdd-8029-4aa6-ac75-0160d3250669" containerName="collect-profiles" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.077657 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4f5bdd-8029-4aa6-ac75-0160d3250669" containerName="collect-profiles" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.078197 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.086639 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.087108 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.087129 4788 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-b5pm7" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.094568 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hq8gs"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.108357 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hq8gs" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.113139 4788 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-gf4d9" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.119965 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-t6sfj"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.126098 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdr5h\" (UniqueName: \"kubernetes.io/projected/0493623c-4b7f-4f28-a1bb-9303b031d9a0-kube-api-access-wdr5h\") pod \"cert-manager-cainjector-7f985d654d-t6sfj\" (UID: \"0493623c-4b7f-4f28-a1bb-9303b031d9a0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.143761 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hq8gs"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.150403 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-697m4"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.151336 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.153889 4788 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-vhs8r" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.162308 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-697m4"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.228044 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkgtd\" (UniqueName: \"kubernetes.io/projected/dea5ca68-6922-46e4-81ed-8c917c670214-kube-api-access-hkgtd\") pod \"cert-manager-webhook-5655c58dd6-697m4\" (UID: \"dea5ca68-6922-46e4-81ed-8c917c670214\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.228147 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdr5h\" (UniqueName: \"kubernetes.io/projected/0493623c-4b7f-4f28-a1bb-9303b031d9a0-kube-api-access-wdr5h\") pod \"cert-manager-cainjector-7f985d654d-t6sfj\" (UID: \"0493623c-4b7f-4f28-a1bb-9303b031d9a0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.228176 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pxfc\" (UniqueName: \"kubernetes.io/projected/c6917182-a497-4802-8747-4a6c3e78a11f-kube-api-access-2pxfc\") pod \"cert-manager-5b446d88c5-hq8gs\" (UID: \"c6917182-a497-4802-8747-4a6c3e78a11f\") " pod="cert-manager/cert-manager-5b446d88c5-hq8gs" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.249616 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdr5h\" (UniqueName: \"kubernetes.io/projected/0493623c-4b7f-4f28-a1bb-9303b031d9a0-kube-api-access-wdr5h\") pod \"cert-manager-cainjector-7f985d654d-t6sfj\" (UID: \"0493623c-4b7f-4f28-a1bb-9303b031d9a0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 
09:34:03.329917 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkgtd\" (UniqueName: \"kubernetes.io/projected/dea5ca68-6922-46e4-81ed-8c917c670214-kube-api-access-hkgtd\") pod \"cert-manager-webhook-5655c58dd6-697m4\" (UID: \"dea5ca68-6922-46e4-81ed-8c917c670214\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.330036 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pxfc\" (UniqueName: \"kubernetes.io/projected/c6917182-a497-4802-8747-4a6c3e78a11f-kube-api-access-2pxfc\") pod \"cert-manager-5b446d88c5-hq8gs\" (UID: \"c6917182-a497-4802-8747-4a6c3e78a11f\") " pod="cert-manager/cert-manager-5b446d88c5-hq8gs" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.347683 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkgtd\" (UniqueName: \"kubernetes.io/projected/dea5ca68-6922-46e4-81ed-8c917c670214-kube-api-access-hkgtd\") pod \"cert-manager-webhook-5655c58dd6-697m4\" (UID: \"dea5ca68-6922-46e4-81ed-8c917c670214\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.347691 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pxfc\" (UniqueName: \"kubernetes.io/projected/c6917182-a497-4802-8747-4a6c3e78a11f-kube-api-access-2pxfc\") pod \"cert-manager-5b446d88c5-hq8gs\" (UID: \"c6917182-a497-4802-8747-4a6c3e78a11f\") " pod="cert-manager/cert-manager-5b446d88c5-hq8gs" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.417951 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.454957 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hq8gs" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.468792 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.746075 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hq8gs"] Dec 11 09:34:03 crc kubenswrapper[4788]: I1211 09:34:03.758355 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:34:04 crc kubenswrapper[4788]: I1211 09:34:04.003045 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-t6sfj"] Dec 11 09:34:04 crc kubenswrapper[4788]: W1211 09:34:04.007289 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0493623c_4b7f_4f28_a1bb_9303b031d9a0.slice/crio-ba2860e6233d04d03e06dc63fb9c76d4f57625408fff37b48f9c810929113627 WatchSource:0}: Error finding container ba2860e6233d04d03e06dc63fb9c76d4f57625408fff37b48f9c810929113627: Status 404 returned error can't find the container with id ba2860e6233d04d03e06dc63fb9c76d4f57625408fff37b48f9c810929113627 Dec 11 09:34:04 crc kubenswrapper[4788]: I1211 09:34:04.008266 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-697m4"] Dec 11 09:34:04 crc kubenswrapper[4788]: W1211 09:34:04.011807 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddea5ca68_6922_46e4_81ed_8c917c670214.slice/crio-ac2aa7107ffec44a16514a3b97d4dbf25f27c56824ad18672eb499ab42aaec69 WatchSource:0}: Error finding container ac2aa7107ffec44a16514a3b97d4dbf25f27c56824ad18672eb499ab42aaec69: Status 404 returned error can't find the container with id ac2aa7107ffec44a16514a3b97d4dbf25f27c56824ad18672eb499ab42aaec69 Dec 11 09:34:04 crc kubenswrapper[4788]: I1211 09:34:04.461648 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hq8gs" event={"ID":"c6917182-a497-4802-8747-4a6c3e78a11f","Type":"ContainerStarted","Data":"1cd83a09a19ca0bb77f46bee3d363495a76af7975039cf5c1273180e3caee6db"} Dec 11 09:34:04 crc kubenswrapper[4788]: I1211 09:34:04.463014 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" event={"ID":"0493623c-4b7f-4f28-a1bb-9303b031d9a0","Type":"ContainerStarted","Data":"ba2860e6233d04d03e06dc63fb9c76d4f57625408fff37b48f9c810929113627"} Dec 11 09:34:04 crc kubenswrapper[4788]: I1211 09:34:04.464819 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" event={"ID":"dea5ca68-6922-46e4-81ed-8c917c670214","Type":"ContainerStarted","Data":"ac2aa7107ffec44a16514a3b97d4dbf25f27c56824ad18672eb499ab42aaec69"} Dec 11 09:34:07 crc kubenswrapper[4788]: I1211 09:34:07.486408 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hq8gs" event={"ID":"c6917182-a497-4802-8747-4a6c3e78a11f","Type":"ContainerStarted","Data":"77ee12d6ef568bb4fd0462c27f63dfed94a011cf6a8d2ffbfebb69f7a683aafd"} Dec 11 09:34:07 crc kubenswrapper[4788]: I1211 09:34:07.489027 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" event={"ID":"0493623c-4b7f-4f28-a1bb-9303b031d9a0","Type":"ContainerStarted","Data":"f28172ab77bc2418ce878e8aa6c9048e94a1f0987810e53852f908727d9f883f"} Dec 11 09:34:07 crc kubenswrapper[4788]: I1211 09:34:07.505358 4788 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-hq8gs" podStartSLOduration=1.142197271 podStartE2EDuration="4.505328555s" podCreationTimestamp="2025-12-11 09:34:03 +0000 UTC" firstStartedPulling="2025-12-11 09:34:03.757941062 +0000 UTC m=+773.828720648" lastFinishedPulling="2025-12-11 09:34:07.121072346 +0000 UTC m=+777.191851932" observedRunningTime="2025-12-11 09:34:07.501751072 +0000 UTC m=+777.572530668" watchObservedRunningTime="2025-12-11 09:34:07.505328555 +0000 UTC m=+777.576108141" Dec 11 09:34:07 crc kubenswrapper[4788]: I1211 09:34:07.538912 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-t6sfj" podStartSLOduration=1.504023788 podStartE2EDuration="4.538873822s" podCreationTimestamp="2025-12-11 09:34:03 +0000 UTC" firstStartedPulling="2025-12-11 09:34:04.009439246 +0000 UTC m=+774.080218832" lastFinishedPulling="2025-12-11 09:34:07.04428928 +0000 UTC m=+777.115068866" observedRunningTime="2025-12-11 09:34:07.522060687 +0000 UTC m=+777.592840273" watchObservedRunningTime="2025-12-11 09:34:07.538873822 +0000 UTC m=+777.609653408" Dec 11 09:34:08 crc kubenswrapper[4788]: I1211 09:34:08.503287 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" event={"ID":"dea5ca68-6922-46e4-81ed-8c917c670214","Type":"ContainerStarted","Data":"05445f7bc0494a470fe96d1a3dd035add95a8cf066723643af2ff7d50315cf7f"} Dec 11 09:34:08 crc kubenswrapper[4788]: I1211 09:34:08.516417 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" podStartSLOduration=1.388437709 podStartE2EDuration="5.516390565s" podCreationTimestamp="2025-12-11 09:34:03 +0000 UTC" firstStartedPulling="2025-12-11 09:34:04.014756544 +0000 UTC m=+774.085536130" lastFinishedPulling="2025-12-11 09:34:08.1427094 +0000 UTC m=+778.213488986" observedRunningTime="2025-12-11 09:34:08.513618903 +0000 UTC m=+778.584398509" watchObservedRunningTime="2025-12-11 09:34:08.516390565 +0000 UTC m=+778.587170151" Dec 11 09:34:09 crc kubenswrapper[4788]: I1211 09:34:09.505549 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.911133 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvxc"] Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.911899 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-controller" containerID="cri-o://a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912039 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="sbdb" containerID="cri-o://eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912083 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="nbdb" 
containerID="cri-o://ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912081 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" containerID="cri-o://74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912183 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="northd" containerID="cri-o://b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912298 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.912348 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-node" containerID="cri-o://fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" gracePeriod=30 Dec 11 09:34:12 crc kubenswrapper[4788]: I1211 09:34:12.992971 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovnkube-controller" containerID="cri-o://457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" gracePeriod=30 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.297853 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/1.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.300917 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.301713 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-controller/0.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.302286 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375543 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375620 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375646 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375676 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375706 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375697 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375737 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375772 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375778 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375855 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375886 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375918 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375958 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.375996 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376020 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376040 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376064 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376097 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlknk\" (UniqueName: \"kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" 
(UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376159 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376180 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376221 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376268 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert\") pod \"3541a671-d810-482f-bf54-71e8f344b788\" (UID: \"3541a671-d810-482f-bf54-71e8f344b788\") " Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376459 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376495 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376513 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log" (OuterVolumeSpecName: "node-log") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376522 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376547 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376573 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376571 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376599 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376532 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376602 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376622 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash" (OuterVolumeSpecName: "host-slash") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376858 4788 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376878 4788 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376892 4788 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376905 4788 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376909 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376919 4788 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376937 4788 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-node-log\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376942 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376950 4788 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376963 4788 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376979 4788 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.376995 4788 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-slash\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.377010 4788 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.377022 4788 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.377034 4788 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.377072 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket" (OuterVolumeSpecName: "log-socket") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.377478 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.400019 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.402616 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk" (OuterVolumeSpecName: "kube-api-access-wlknk") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "kube-api-access-wlknk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.408772 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3541a671-d810-482f-bf54-71e8f344b788" (UID: "3541a671-d810-482f-bf54-71e8f344b788"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.408877 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vp5fg"] Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409075 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="northd" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409094 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="northd" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409104 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409111 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409120 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="nbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409126 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="nbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409136 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409142 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409150 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-node" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409157 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-node" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409168 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="sbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409174 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="sbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409182 4788 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kubecfg-setup" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409188 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kubecfg-setup" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409199 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovnkube-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409206 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovnkube-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409221 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409245 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409351 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="sbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409361 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409372 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="nbdb" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409381 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409387 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="northd" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409394 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409401 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovnkube-controller" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409409 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="kube-rbac-proxy-node" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409418 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.409524 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.409531 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3541a671-d810-482f-bf54-71e8f344b788" containerName="ovn-acl-logging" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.411142 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.471978 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-697m4" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478092 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478163 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-bin\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478197 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-netns\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478314 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-slash\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478356 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478382 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-systemd-units\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478410 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-config\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478437 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovn-node-metrics-cert\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478465 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-systemd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478492 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478519 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4lwb\" (UniqueName: \"kubernetes.io/projected/f774bb21-cdcf-4740-ae2d-a91b406c4657-kube-api-access-p4lwb\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478547 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-node-log\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478578 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-kubelet\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478612 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-netd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478646 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-ovn\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478670 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-env-overrides\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478707 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-var-lib-openvswitch\") pod 
\"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478738 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-etc-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478764 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-log-socket\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478786 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-script-lib\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478826 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlknk\" (UniqueName: \"kubernetes.io/projected/3541a671-d810-482f-bf54-71e8f344b788-kube-api-access-wlknk\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478837 4788 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478848 4788 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478859 4788 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-log-socket\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478870 4788 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3541a671-d810-482f-bf54-71e8f344b788-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478881 4788 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3541a671-d810-482f-bf54-71e8f344b788-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.478890 4788 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3541a671-d810-482f-bf54-71e8f344b788-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.530857 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gtkxj_a3cea019-16b2-4a01-a945-cd2b37745330/kube-multus/0.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.530913 4788 generic.go:334] "Generic (PLEG): container 
finished" podID="a3cea019-16b2-4a01-a945-cd2b37745330" containerID="fc22f8b9744bc5587881d46a768d97e9eef1660f056492bc4b117e2eb7c5040c" exitCode=2 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.530974 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gtkxj" event={"ID":"a3cea019-16b2-4a01-a945-cd2b37745330","Type":"ContainerDied","Data":"fc22f8b9744bc5587881d46a768d97e9eef1660f056492bc4b117e2eb7c5040c"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.531570 4788 scope.go:117] "RemoveContainer" containerID="fc22f8b9744bc5587881d46a768d97e9eef1660f056492bc4b117e2eb7c5040c" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.534825 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/1.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.538422 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-acl-logging/0.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.541435 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-twvxc_3541a671-d810-482f-bf54-71e8f344b788/ovn-controller/0.log" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542121 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" exitCode=143 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542249 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542328 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542390 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542457 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542521 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542584 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" exitCode=0 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542657 4788 generic.go:334] "Generic (PLEG): container finished" podID="3541a671-d810-482f-bf54-71e8f344b788" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" exitCode=143 Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542262 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" 
event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542852 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542946 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543017 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543078 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543145 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.542267 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543220 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543411 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543488 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543558 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543625 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543695 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543759 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543831 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543899 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.543964 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544043 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544109 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544170 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544252 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544318 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544389 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544455 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544508 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544564 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544625 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544691 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544777 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544852 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544921 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545005 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545069 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545129 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545198 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545338 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545415 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.544022 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545482 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545612 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-twvxc" event={"ID":"3541a671-d810-482f-bf54-71e8f344b788","Type":"ContainerDied","Data":"63fbb2ae13e61224c0d33da7c84d5f83f442e3e4288944ef566f9070b8284f88"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545649 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545669 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545676 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545682 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545688 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545694 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545700 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545705 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545711 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.545719 4788 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.568195 4788 scope.go:117] "RemoveContainer" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581294 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-systemd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581637 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-systemd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581643 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581798 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4lwb\" (UniqueName: \"kubernetes.io/projected/f774bb21-cdcf-4740-ae2d-a91b406c4657-kube-api-access-p4lwb\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581868 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-node-log\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581934 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-kubelet\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.581995 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-netd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582080 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582203 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-node-log\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582153 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-ovn\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582106 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-run-ovn\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582453 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-kubelet\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.582503 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-netd\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.583905 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-env-overrides\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584067 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-var-lib-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584108 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-etc-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584116 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-var-lib-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584152 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-log-socket\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584176 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-etc-openvswitch\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584182 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-script-lib\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584238 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-log-socket\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584289 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584335 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-bin\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584367 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-netns\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584560 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-slash\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584604 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584637 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-systemd-units\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584673 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-config\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.584706 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovn-node-metrics-cert\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585012 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-script-lib\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585436 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-cni-bin\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585520 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585558 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-netns\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585575 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-run-ovn-kubernetes\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585626 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-host-slash\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.585665 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/f774bb21-cdcf-4740-ae2d-a91b406c4657-systemd-units\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.586202 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovnkube-config\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.586787 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f774bb21-cdcf-4740-ae2d-a91b406c4657-env-overrides\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.589386 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f774bb21-cdcf-4740-ae2d-a91b406c4657-ovn-node-metrics-cert\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.591781 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvxc"] Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.595384 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-twvxc"] Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.598827 4788 scope.go:117] "RemoveContainer" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.607388 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4lwb\" (UniqueName: \"kubernetes.io/projected/f774bb21-cdcf-4740-ae2d-a91b406c4657-kube-api-access-p4lwb\") pod \"ovnkube-node-vp5fg\" (UID: \"f774bb21-cdcf-4740-ae2d-a91b406c4657\") " pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.614278 4788 scope.go:117] "RemoveContainer" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.629098 4788 scope.go:117] "RemoveContainer" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.648002 4788 scope.go:117] "RemoveContainer" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.664908 4788 scope.go:117] "RemoveContainer" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.684858 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.704052 4788 scope.go:117] "RemoveContainer" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.719321 4788 scope.go:117] "RemoveContainer" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 
09:34:13.728377 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.732877 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.733299 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.733426 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} err="failed to get container status \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.733548 4788 scope.go:117] "RemoveContainer" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.734078 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": container with ID starting with 457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe not found: ID does not exist" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.734185 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} err="failed to get container status \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": rpc error: code = NotFound desc = could not find container \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": container with ID starting with 457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.734335 4788 scope.go:117] "RemoveContainer" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.734635 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": container with ID starting with eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321 not found: ID does not exist" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.734739 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} err="failed to get container status 
\"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": rpc error: code = NotFound desc = could not find container \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": container with ID starting with eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.734823 4788 scope.go:117] "RemoveContainer" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.735380 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": container with ID starting with ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914 not found: ID does not exist" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.735430 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} err="failed to get container status \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": rpc error: code = NotFound desc = could not find container \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": container with ID starting with ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.735465 4788 scope.go:117] "RemoveContainer" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.735955 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": container with ID starting with b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40 not found: ID does not exist" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.736070 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} err="failed to get container status \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": rpc error: code = NotFound desc = could not find container \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": container with ID starting with b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.736161 4788 scope.go:117] "RemoveContainer" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.737212 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": container with ID starting with ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19 not found: ID does not exist" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.737375 4788 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} err="failed to get container status \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": rpc error: code = NotFound desc = could not find container \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": container with ID starting with ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.737465 4788 scope.go:117] "RemoveContainer" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.740568 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": container with ID starting with fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f not found: ID does not exist" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.741124 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} err="failed to get container status \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": rpc error: code = NotFound desc = could not find container \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": container with ID starting with fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.741262 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.742023 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": container with ID starting with 3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3 not found: ID does not exist" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.742079 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} err="failed to get container status \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": rpc error: code = NotFound desc = could not find container \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": container with ID starting with 3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.742120 4788 scope.go:117] "RemoveContainer" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.742664 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": container with ID starting with a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184 not found: ID does not exist" 
containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.742691 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} err="failed to get container status \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": rpc error: code = NotFound desc = could not find container \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": container with ID starting with a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.742711 4788 scope.go:117] "RemoveContainer" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: E1211 09:34:13.742999 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": container with ID starting with 2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940 not found: ID does not exist" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.743022 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} err="failed to get container status \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": rpc error: code = NotFound desc = could not find container \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": container with ID starting with 2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.743038 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.746136 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} err="failed to get container status \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.746184 4788 scope.go:117] "RemoveContainer" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.746874 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} err="failed to get container status \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": rpc error: code = NotFound desc = could not find container \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": container with ID starting with 457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.746911 4788 scope.go:117] "RemoveContainer" 
containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.747541 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} err="failed to get container status \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": rpc error: code = NotFound desc = could not find container \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": container with ID starting with eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.747609 4788 scope.go:117] "RemoveContainer" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.756502 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} err="failed to get container status \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": rpc error: code = NotFound desc = could not find container \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": container with ID starting with ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.756576 4788 scope.go:117] "RemoveContainer" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.757278 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} err="failed to get container status \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": rpc error: code = NotFound desc = could not find container \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": container with ID starting with b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.757345 4788 scope.go:117] "RemoveContainer" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.758040 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} err="failed to get container status \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": rpc error: code = NotFound desc = could not find container \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": container with ID starting with ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.758084 4788 scope.go:117] "RemoveContainer" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.758665 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} err="failed to get container status \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": rpc error: code = NotFound desc = could not find 
container \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": container with ID starting with fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.758742 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.759382 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} err="failed to get container status \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": rpc error: code = NotFound desc = could not find container \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": container with ID starting with 3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.759416 4788 scope.go:117] "RemoveContainer" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.760778 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} err="failed to get container status \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": rpc error: code = NotFound desc = could not find container \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": container with ID starting with a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.760810 4788 scope.go:117] "RemoveContainer" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.761179 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} err="failed to get container status \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": rpc error: code = NotFound desc = could not find container \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": container with ID starting with 2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.761221 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.761610 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} err="failed to get container status \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.761640 4788 scope.go:117] "RemoveContainer" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.761971 4788 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} err="failed to get container status \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": rpc error: code = NotFound desc = could not find container \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": container with ID starting with 457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.762088 4788 scope.go:117] "RemoveContainer" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.766480 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} err="failed to get container status \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": rpc error: code = NotFound desc = could not find container \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": container with ID starting with eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.766506 4788 scope.go:117] "RemoveContainer" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.766823 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} err="failed to get container status \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": rpc error: code = NotFound desc = could not find container \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": container with ID starting with ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.766957 4788 scope.go:117] "RemoveContainer" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.767362 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} err="failed to get container status \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": rpc error: code = NotFound desc = could not find container \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": container with ID starting with b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.767438 4788 scope.go:117] "RemoveContainer" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.767744 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} err="failed to get container status \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": rpc error: code = NotFound desc = could not find container \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": container with ID starting with 
ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.767831 4788 scope.go:117] "RemoveContainer" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.768170 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} err="failed to get container status \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": rpc error: code = NotFound desc = could not find container \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": container with ID starting with fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.768270 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.768677 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} err="failed to get container status \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": rpc error: code = NotFound desc = could not find container \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": container with ID starting with 3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.768767 4788 scope.go:117] "RemoveContainer" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.769086 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} err="failed to get container status \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": rpc error: code = NotFound desc = could not find container \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": container with ID starting with a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.769175 4788 scope.go:117] "RemoveContainer" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.769518 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} err="failed to get container status \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": rpc error: code = NotFound desc = could not find container \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": container with ID starting with 2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.770223 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.770772 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} err="failed to get container status \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.770837 4788 scope.go:117] "RemoveContainer" containerID="457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.771256 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe"} err="failed to get container status \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": rpc error: code = NotFound desc = could not find container \"457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe\": container with ID starting with 457430ee171d164dfb9bb6f925bdb92d232703b25d7aa3f7d23c673f75bf4ffe not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.771378 4788 scope.go:117] "RemoveContainer" containerID="eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.771879 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321"} err="failed to get container status \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": rpc error: code = NotFound desc = could not find container \"eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321\": container with ID starting with eb74e3198af7d09c160de95dfa4279e99995725dfd491377d578b21bff0a7321 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.771906 4788 scope.go:117] "RemoveContainer" containerID="ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772210 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914"} err="failed to get container status \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": rpc error: code = NotFound desc = could not find container \"ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914\": container with ID starting with ce57bef1dc66150cb48416849fe5fd210ea24f0f937258f6bc4964d68f073914 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772259 4788 scope.go:117] "RemoveContainer" containerID="b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772570 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40"} err="failed to get container status \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": rpc error: code = NotFound desc = could not find container \"b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40\": container with ID starting with b36a92a22c8d0b5c5ff340e09757f373d57b454e9f385d6e4e89a093ddb49a40 not found: ID does not exist" Dec 
11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772590 4788 scope.go:117] "RemoveContainer" containerID="ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772813 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19"} err="failed to get container status \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": rpc error: code = NotFound desc = could not find container \"ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19\": container with ID starting with ea059744d3c91f4ed4ae7414e5127f1b9ce2486026660974e0ac96e11bb26b19 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.772838 4788 scope.go:117] "RemoveContainer" containerID="fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.773087 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f"} err="failed to get container status \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": rpc error: code = NotFound desc = could not find container \"fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f\": container with ID starting with fa2d8cbefcd132304d81390a29f23cf63edd104792e0d4815fc6f09b0065a79f not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.773116 4788 scope.go:117] "RemoveContainer" containerID="3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.773604 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3"} err="failed to get container status \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": rpc error: code = NotFound desc = could not find container \"3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3\": container with ID starting with 3cc9efb8bf5ab934b963db60bebaeb037a9096e2971e142e406c711ec63447b3 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.773631 4788 scope.go:117] "RemoveContainer" containerID="a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.775012 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184"} err="failed to get container status \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": rpc error: code = NotFound desc = could not find container \"a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184\": container with ID starting with a17aacbf1309749ca5afdfa49639c407007b6c7fc34639d0fb430f4a06191184 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.775047 4788 scope.go:117] "RemoveContainer" containerID="2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.776615 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940"} err="failed to get container status 
\"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": rpc error: code = NotFound desc = could not find container \"2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940\": container with ID starting with 2b6cb5e3b1badd8c2a53282d14926531e1daac19e7b1e1806016e4f8d9d2c940 not found: ID does not exist" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.776645 4788 scope.go:117] "RemoveContainer" containerID="74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a" Dec 11 09:34:13 crc kubenswrapper[4788]: I1211 09:34:13.777429 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a"} err="failed to get container status \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": rpc error: code = NotFound desc = could not find container \"74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a\": container with ID starting with 74c90f764bddd6e5e936ce9b9ba3ef3383743d9b4356ecf2679dd39f147ee06a not found: ID does not exist" Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.504281 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3541a671-d810-482f-bf54-71e8f344b788" path="/var/lib/kubelet/pods/3541a671-d810-482f-bf54-71e8f344b788/volumes" Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.549421 4788 generic.go:334] "Generic (PLEG): container finished" podID="f774bb21-cdcf-4740-ae2d-a91b406c4657" containerID="05053baf024e826ba6b32c18fdc3fbbbb7765294a316877d0c5222dd227eff5e" exitCode=0 Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.549507 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerDied","Data":"05053baf024e826ba6b32c18fdc3fbbbb7765294a316877d0c5222dd227eff5e"} Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.549542 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"a8b732a64120979deb08e93cbd8b4e43ddb99fb90cd4fe0b598178a2fd4b1ed6"} Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.554636 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gtkxj_a3cea019-16b2-4a01-a945-cd2b37745330/kube-multus/0.log" Dec 11 09:34:14 crc kubenswrapper[4788]: I1211 09:34:14.554747 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gtkxj" event={"ID":"a3cea019-16b2-4a01-a945-cd2b37745330","Type":"ContainerStarted","Data":"f4ee5bbb88933615baf1e336a998d410c9f7b376e4f7af20d9b0fbacff7e7c1c"} Dec 11 09:34:15 crc kubenswrapper[4788]: I1211 09:34:15.566620 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"f3805f6db662b143f47159164ed43b54a9612970a98dc689130a9dc26b236a56"} Dec 11 09:34:15 crc kubenswrapper[4788]: I1211 09:34:15.566676 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"f6c45fa789174a923e6e6f28452c04863a7f5cdb1d7855b5c5ab90ec34dbc739"} Dec 11 09:34:15 crc kubenswrapper[4788]: I1211 09:34:15.566696 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"4603b36802e004d3e19d83fa5eb5145aba40879c3295e01cbb4e80da85ecad2a"} Dec 11 09:34:16 crc kubenswrapper[4788]: I1211 09:34:16.587940 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"41409128e7a873f339f1922bc2e3dde79889655e02ac9e5728c4ca60f27b0359"} Dec 11 09:34:16 crc kubenswrapper[4788]: I1211 09:34:16.588589 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"3b61ec69b5b59c4346e0082b65f3296885fa4341827e0999cae8c07ac976d636"} Dec 11 09:34:16 crc kubenswrapper[4788]: I1211 09:34:16.588608 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"22543cf72e30089a6ab1a471e4f88081d2d0773d72707c54ead6276fa8c95c06"} Dec 11 09:34:18 crc kubenswrapper[4788]: I1211 09:34:18.604743 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"d7e987b9d459d5e6d9a301fda818616a9560f87aaca344d596e728dbed5e652e"} Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.096518 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.097948 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.166105 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.166439 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfhhf\" (UniqueName: \"kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.166653 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.267686 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.267754 
4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.267821 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfhhf\" (UniqueName: \"kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.268343 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.268472 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.291826 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfhhf\" (UniqueName: \"kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf\") pod \"community-operators-tlrsx\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: I1211 09:34:19.417863 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: E1211 09:34:19.447010 4788 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c18aad91ace461fde6b1bc84aae0e37f926dadba786d0d479771e1b7ec1b65ec): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 11 09:34:19 crc kubenswrapper[4788]: E1211 09:34:19.447140 4788 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c18aad91ace461fde6b1bc84aae0e37f926dadba786d0d479771e1b7ec1b65ec): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: E1211 09:34:19.447204 4788 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c18aad91ace461fde6b1bc84aae0e37f926dadba786d0d479771e1b7ec1b65ec): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:19 crc kubenswrapper[4788]: E1211 09:34:19.447281 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"community-operators-tlrsx_openshift-marketplace(80d4daf9-430c-410c-a51c-31dabe6cf9b0)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"community-operators-tlrsx_openshift-marketplace(80d4daf9-430c-410c-a51c-31dabe6cf9b0)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c18aad91ace461fde6b1bc84aae0e37f926dadba786d0d479771e1b7ec1b65ec): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/community-operators-tlrsx" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.628736 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" event={"ID":"f774bb21-cdcf-4740-ae2d-a91b406c4657","Type":"ContainerStarted","Data":"9b693c60e334b138955252102c782819d8c5a912d55e8b3fe5a7d866800a1e2a"} Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.629742 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.629759 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.629770 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.694155 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" podStartSLOduration=9.694123926 podStartE2EDuration="9.694123926s" podCreationTimestamp="2025-12-11 09:34:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:34:22.692535555 +0000 UTC m=+792.763315171" watchObservedRunningTime="2025-12-11 09:34:22.694123926 +0000 UTC m=+792.764903512" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.695330 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.696994 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.790249 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.790449 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:22 crc kubenswrapper[4788]: I1211 09:34:22.791065 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:22 crc kubenswrapper[4788]: E1211 09:34:22.826776 4788 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c0e9bb4a75a6e7a96a3f0ae6ada38a48aa6817da075ecbb57b7fac646f392dfd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 11 09:34:22 crc kubenswrapper[4788]: E1211 09:34:22.827474 4788 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c0e9bb4a75a6e7a96a3f0ae6ada38a48aa6817da075ecbb57b7fac646f392dfd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:22 crc kubenswrapper[4788]: E1211 09:34:22.827541 4788 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c0e9bb4a75a6e7a96a3f0ae6ada38a48aa6817da075ecbb57b7fac646f392dfd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:22 crc kubenswrapper[4788]: E1211 09:34:22.827643 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"community-operators-tlrsx_openshift-marketplace(80d4daf9-430c-410c-a51c-31dabe6cf9b0)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"community-operators-tlrsx_openshift-marketplace(80d4daf9-430c-410c-a51c-31dabe6cf9b0)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_community-operators-tlrsx_openshift-marketplace_80d4daf9-430c-410c-a51c-31dabe6cf9b0_0(c0e9bb4a75a6e7a96a3f0ae6ada38a48aa6817da075ecbb57b7fac646f392dfd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/community-operators-tlrsx" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" Dec 11 09:34:38 crc kubenswrapper[4788]: I1211 09:34:38.495355 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:38 crc kubenswrapper[4788]: I1211 09:34:38.496785 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:38 crc kubenswrapper[4788]: I1211 09:34:38.914181 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:34:39 crc kubenswrapper[4788]: I1211 09:34:39.745850 4788 generic.go:334] "Generic (PLEG): container finished" podID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerID="b7a505b21b6b8686241e415802d4c53e495d4a6aa5f4ebd1df9d322963eede71" exitCode=0 Dec 11 09:34:39 crc kubenswrapper[4788]: I1211 09:34:39.745963 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerDied","Data":"b7a505b21b6b8686241e415802d4c53e495d4a6aa5f4ebd1df9d322963eede71"} Dec 11 09:34:39 crc kubenswrapper[4788]: I1211 09:34:39.746309 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerStarted","Data":"ecde431e0ab00ed00be277ea54257165508cf4c807a646e8f64cd872c3ba978d"} Dec 11 09:34:43 crc kubenswrapper[4788]: I1211 09:34:43.757268 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vp5fg" Dec 11 09:34:45 crc kubenswrapper[4788]: I1211 09:34:45.788794 4788 generic.go:334] "Generic (PLEG): container finished" podID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerID="a3d6f67b255b7fb9436e5a3bb0f3e023f68f770c52e479275454d3dfb2367255" exitCode=0 Dec 11 09:34:45 crc kubenswrapper[4788]: I1211 09:34:45.788879 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerDied","Data":"a3d6f67b255b7fb9436e5a3bb0f3e023f68f770c52e479275454d3dfb2367255"} Dec 11 09:34:46 crc kubenswrapper[4788]: I1211 09:34:46.797356 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerStarted","Data":"f1b38dae124b010aa809a03947705d4c97641506dd00ca39d0a3d2efd65d69ad"} Dec 11 09:34:46 crc kubenswrapper[4788]: I1211 09:34:46.823300 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tlrsx" podStartSLOduration=21.12185277 podStartE2EDuration="27.823261731s" podCreationTimestamp="2025-12-11 09:34:19 +0000 UTC" firstStartedPulling="2025-12-11 09:34:39.748063814 +0000 UTC m=+809.818843400" lastFinishedPulling="2025-12-11 09:34:46.449472775 +0000 UTC m=+816.520252361" observedRunningTime="2025-12-11 09:34:46.820809618 +0000 UTC m=+816.891589234" watchObservedRunningTime="2025-12-11 09:34:46.823261731 +0000 UTC m=+816.894041317" Dec 11 09:34:49 crc kubenswrapper[4788]: I1211 09:34:49.418949 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:49 crc kubenswrapper[4788]: I1211 09:34:49.419485 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:49 crc kubenswrapper[4788]: I1211 09:34:49.483359 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.502080 4788 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj"] Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.503834 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.506500 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.520125 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj"] Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.566044 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpj8r\" (UniqueName: \"kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.566145 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.566260 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.667848 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpj8r\" (UniqueName: \"kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.667917 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.667965 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " 
pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.668614 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.668736 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.690714 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpj8r\" (UniqueName: \"kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:53 crc kubenswrapper[4788]: I1211 09:34:53.821597 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:34:54 crc kubenswrapper[4788]: I1211 09:34:54.033072 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj"] Dec 11 09:34:54 crc kubenswrapper[4788]: W1211 09:34:54.037685 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7be2709_1aa9_4107_88d0_ab448f415893.slice/crio-8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041 WatchSource:0}: Error finding container 8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041: Status 404 returned error can't find the container with id 8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041 Dec 11 09:34:54 crc kubenswrapper[4788]: I1211 09:34:54.851566 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerStarted","Data":"8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041"} Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.393053 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.394889 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.410556 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.495041 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.495110 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.495183 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rr95\" (UniqueName: \"kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.596945 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rr95\" (UniqueName: \"kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.597029 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.597065 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.597695 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.597824 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.636614 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4rr95\" (UniqueName: \"kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95\") pod \"redhat-operators-rck9k\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:55 crc kubenswrapper[4788]: I1211 09:34:55.713431 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:34:56 crc kubenswrapper[4788]: I1211 09:34:56.137710 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:34:56 crc kubenswrapper[4788]: W1211 09:34:56.141917 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1711c086_860f_4542_bc2e_2c7b93fd22e1.slice/crio-11edab9d3adad89eadc832b44a5e2093e2152fe6973f9de835b43b3b9587bef0 WatchSource:0}: Error finding container 11edab9d3adad89eadc832b44a5e2093e2152fe6973f9de835b43b3b9587bef0: Status 404 returned error can't find the container with id 11edab9d3adad89eadc832b44a5e2093e2152fe6973f9de835b43b3b9587bef0 Dec 11 09:34:56 crc kubenswrapper[4788]: I1211 09:34:56.865408 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerStarted","Data":"2bbd61111b93c5325843a57c55d6438f1885c9eb5571aebf5ed220a0506c3e87"} Dec 11 09:34:56 crc kubenswrapper[4788]: I1211 09:34:56.868432 4788 generic.go:334] "Generic (PLEG): container finished" podID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerID="b33cad0e75ae0175cd78463cfed77488933a5906053aa67a8d7628c37fa77cb9" exitCode=0 Dec 11 09:34:56 crc kubenswrapper[4788]: I1211 09:34:56.868471 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerDied","Data":"b33cad0e75ae0175cd78463cfed77488933a5906053aa67a8d7628c37fa77cb9"} Dec 11 09:34:56 crc kubenswrapper[4788]: I1211 09:34:56.868493 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerStarted","Data":"11edab9d3adad89eadc832b44a5e2093e2152fe6973f9de835b43b3b9587bef0"} Dec 11 09:34:57 crc kubenswrapper[4788]: I1211 09:34:57.879106 4788 generic.go:334] "Generic (PLEG): container finished" podID="d7be2709-1aa9-4107-88d0-ab448f415893" containerID="2bbd61111b93c5325843a57c55d6438f1885c9eb5571aebf5ed220a0506c3e87" exitCode=0 Dec 11 09:34:57 crc kubenswrapper[4788]: I1211 09:34:57.879262 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerDied","Data":"2bbd61111b93c5325843a57c55d6438f1885c9eb5571aebf5ed220a0506c3e87"} Dec 11 09:34:58 crc kubenswrapper[4788]: I1211 09:34:58.888479 4788 generic.go:334] "Generic (PLEG): container finished" podID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerID="ef12b0044becdd53db36644f6c82202c65902a1bd46afb7a6d0487b76cec82c3" exitCode=0 Dec 11 09:34:58 crc kubenswrapper[4788]: I1211 09:34:58.888543 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" 
event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerDied","Data":"ef12b0044becdd53db36644f6c82202c65902a1bd46afb7a6d0487b76cec82c3"} Dec 11 09:34:59 crc kubenswrapper[4788]: I1211 09:34:59.517004 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:34:59 crc kubenswrapper[4788]: I1211 09:34:59.897347 4788 generic.go:334] "Generic (PLEG): container finished" podID="d7be2709-1aa9-4107-88d0-ab448f415893" containerID="559e301816b06e626369674f3bd9d389864a3982d69a39f07d3b24bbb321d3a2" exitCode=0 Dec 11 09:34:59 crc kubenswrapper[4788]: I1211 09:34:59.897437 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerDied","Data":"559e301816b06e626369674f3bd9d389864a3982d69a39f07d3b24bbb321d3a2"} Dec 11 09:34:59 crc kubenswrapper[4788]: I1211 09:34:59.903141 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerStarted","Data":"cf473de55ad2068a4f2abfc18c08ff179df394f80dbd07c23f22d7316d4d1718"} Dec 11 09:34:59 crc kubenswrapper[4788]: I1211 09:34:59.968903 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rck9k" podStartSLOduration=2.358015514 podStartE2EDuration="4.968878682s" podCreationTimestamp="2025-12-11 09:34:55 +0000 UTC" firstStartedPulling="2025-12-11 09:34:56.870701502 +0000 UTC m=+826.941481088" lastFinishedPulling="2025-12-11 09:34:59.48156467 +0000 UTC m=+829.552344256" observedRunningTime="2025-12-11 09:34:59.966250594 +0000 UTC m=+830.037030190" watchObservedRunningTime="2025-12-11 09:34:59.968878682 +0000 UTC m=+830.039658268" Dec 11 09:35:00 crc kubenswrapper[4788]: I1211 09:35:00.912754 4788 generic.go:334] "Generic (PLEG): container finished" podID="d7be2709-1aa9-4107-88d0-ab448f415893" containerID="d604071bac94f6fed9cc9c8cbb77f77752227e435bd2ac681a6d814a10054f5b" exitCode=0 Dec 11 09:35:00 crc kubenswrapper[4788]: I1211 09:35:00.912823 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerDied","Data":"d604071bac94f6fed9cc9c8cbb77f77752227e435bd2ac681a6d814a10054f5b"} Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.245197 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.302534 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle\") pod \"d7be2709-1aa9-4107-88d0-ab448f415893\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.302625 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util\") pod \"d7be2709-1aa9-4107-88d0-ab448f415893\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.302659 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpj8r\" (UniqueName: \"kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r\") pod \"d7be2709-1aa9-4107-88d0-ab448f415893\" (UID: \"d7be2709-1aa9-4107-88d0-ab448f415893\") " Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.303339 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle" (OuterVolumeSpecName: "bundle") pod "d7be2709-1aa9-4107-88d0-ab448f415893" (UID: "d7be2709-1aa9-4107-88d0-ab448f415893"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.314559 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r" (OuterVolumeSpecName: "kube-api-access-jpj8r") pod "d7be2709-1aa9-4107-88d0-ab448f415893" (UID: "d7be2709-1aa9-4107-88d0-ab448f415893"). InnerVolumeSpecName "kube-api-access-jpj8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.320034 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util" (OuterVolumeSpecName: "util") pod "d7be2709-1aa9-4107-88d0-ab448f415893" (UID: "d7be2709-1aa9-4107-88d0-ab448f415893"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.404085 4788 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.404135 4788 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d7be2709-1aa9-4107-88d0-ab448f415893-util\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.404150 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpj8r\" (UniqueName: \"kubernetes.io/projected/d7be2709-1aa9-4107-88d0-ab448f415893-kube-api-access-jpj8r\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.926645 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" event={"ID":"d7be2709-1aa9-4107-88d0-ab448f415893","Type":"ContainerDied","Data":"8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041"} Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.926709 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eb58aa489e99e0daec6f0df81376d8c48d7bf3097e6124318ef67faafcfe041" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.926719 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj" Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.987471 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:35:02 crc kubenswrapper[4788]: I1211 09:35:02.987820 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tlrsx" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="registry-server" containerID="cri-o://f1b38dae124b010aa809a03947705d4c97641506dd00ca39d0a3d2efd65d69ad" gracePeriod=2 Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.289814 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-8dw7z"] Dec 11 09:35:04 crc kubenswrapper[4788]: E1211 09:35:04.290134 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="extract" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.290150 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="extract" Dec 11 09:35:04 crc kubenswrapper[4788]: E1211 09:35:04.290171 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="util" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.290179 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="util" Dec 11 09:35:04 crc kubenswrapper[4788]: E1211 09:35:04.290193 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="pull" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.290201 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="pull" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 
09:35:04.290341 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7be2709-1aa9-4107-88d0-ab448f415893" containerName="extract" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.290872 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.292815 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-2wt64" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.293674 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.293824 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.308682 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-8dw7z"] Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.330609 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hnjp\" (UniqueName: \"kubernetes.io/projected/0f1f9ffb-58ec-4282-b3a1-d9040b09023b-kube-api-access-8hnjp\") pod \"nmstate-operator-6769fb99d-8dw7z\" (UID: \"0f1f9ffb-58ec-4282-b3a1-d9040b09023b\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.432300 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hnjp\" (UniqueName: \"kubernetes.io/projected/0f1f9ffb-58ec-4282-b3a1-d9040b09023b-kube-api-access-8hnjp\") pod \"nmstate-operator-6769fb99d-8dw7z\" (UID: \"0f1f9ffb-58ec-4282-b3a1-d9040b09023b\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.460140 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hnjp\" (UniqueName: \"kubernetes.io/projected/0f1f9ffb-58ec-4282-b3a1-d9040b09023b-kube-api-access-8hnjp\") pod \"nmstate-operator-6769fb99d-8dw7z\" (UID: \"0f1f9ffb-58ec-4282-b3a1-d9040b09023b\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.611769 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.895022 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-8dw7z"] Dec 11 09:35:04 crc kubenswrapper[4788]: W1211 09:35:04.910743 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f1f9ffb_58ec_4282_b3a1_d9040b09023b.slice/crio-cbbb1e0e0fde1df6b96aa29fbc6d086c9b51268a6b5e261c68228f4280a1d174 WatchSource:0}: Error finding container cbbb1e0e0fde1df6b96aa29fbc6d086c9b51268a6b5e261c68228f4280a1d174: Status 404 returned error can't find the container with id cbbb1e0e0fde1df6b96aa29fbc6d086c9b51268a6b5e261c68228f4280a1d174 Dec 11 09:35:04 crc kubenswrapper[4788]: I1211 09:35:04.956919 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" event={"ID":"0f1f9ffb-58ec-4282-b3a1-d9040b09023b","Type":"ContainerStarted","Data":"cbbb1e0e0fde1df6b96aa29fbc6d086c9b51268a6b5e261c68228f4280a1d174"} Dec 11 09:35:05 crc kubenswrapper[4788]: I1211 09:35:05.714504 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:05 crc kubenswrapper[4788]: I1211 09:35:05.714593 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:06 crc kubenswrapper[4788]: I1211 09:35:06.753439 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rck9k" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="registry-server" probeResult="failure" output=< Dec 11 09:35:06 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:35:06 crc kubenswrapper[4788]: > Dec 11 09:35:08 crc kubenswrapper[4788]: I1211 09:35:08.084579 4788 generic.go:334] "Generic (PLEG): container finished" podID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerID="f1b38dae124b010aa809a03947705d4c97641506dd00ca39d0a3d2efd65d69ad" exitCode=0 Dec 11 09:35:08 crc kubenswrapper[4788]: I1211 09:35:08.084662 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerDied","Data":"f1b38dae124b010aa809a03947705d4c97641506dd00ca39d0a3d2efd65d69ad"} Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.167154 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.206822 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content\") pod \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.207037 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities\") pod \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.207128 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfhhf\" (UniqueName: \"kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf\") pod \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\" (UID: \"80d4daf9-430c-410c-a51c-31dabe6cf9b0\") " Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.219185 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:09 crc kubenswrapper[4788]: E1211 09:35:09.221052 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="extract-utilities" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.221106 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="extract-utilities" Dec 11 09:35:09 crc kubenswrapper[4788]: E1211 09:35:09.221126 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="extract-content" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.221138 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="extract-content" Dec 11 09:35:09 crc kubenswrapper[4788]: E1211 09:35:09.221176 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="registry-server" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.221186 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="registry-server" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.221742 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities" (OuterVolumeSpecName: "utilities") pod "80d4daf9-430c-410c-a51c-31dabe6cf9b0" (UID: "80d4daf9-430c-410c-a51c-31dabe6cf9b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.241588 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf" (OuterVolumeSpecName: "kube-api-access-pfhhf") pod "80d4daf9-430c-410c-a51c-31dabe6cf9b0" (UID: "80d4daf9-430c-410c-a51c-31dabe6cf9b0"). InnerVolumeSpecName "kube-api-access-pfhhf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.250457 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" containerName="registry-server" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.253205 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.253453 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.295079 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80d4daf9-430c-410c-a51c-31dabe6cf9b0" (UID: "80d4daf9-430c-410c-a51c-31dabe6cf9b0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311191 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311333 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311362 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ddz7\" (UniqueName: \"kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311427 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311442 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfhhf\" (UniqueName: \"kubernetes.io/projected/80d4daf9-430c-410c-a51c-31dabe6cf9b0-kube-api-access-pfhhf\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.311454 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80d4daf9-430c-410c-a51c-31dabe6cf9b0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.412569 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc 
kubenswrapper[4788]: I1211 09:35:09.412624 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.412663 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ddz7\" (UniqueName: \"kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.413479 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.413702 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.433509 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ddz7\" (UniqueName: \"kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7\") pod \"certified-operators-v7mt9\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.581741 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:09 crc kubenswrapper[4788]: I1211 09:35:09.855413 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.104049 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tlrsx" event={"ID":"80d4daf9-430c-410c-a51c-31dabe6cf9b0","Type":"ContainerDied","Data":"ecde431e0ab00ed00be277ea54257165508cf4c807a646e8f64cd872c3ba978d"} Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.104125 4788 scope.go:117] "RemoveContainer" containerID="f1b38dae124b010aa809a03947705d4c97641506dd00ca39d0a3d2efd65d69ad" Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.104144 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tlrsx" Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.151098 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.154886 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tlrsx"] Dec 11 09:35:10 crc kubenswrapper[4788]: W1211 09:35:10.376279 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cc1af01_3720_4f6d_873c_bc1aded7421a.slice/crio-22b3721955f0cab2ef7d8a30a707f5e4518b4e8ffb25abadfe2ab45c099c440c WatchSource:0}: Error finding container 22b3721955f0cab2ef7d8a30a707f5e4518b4e8ffb25abadfe2ab45c099c440c: Status 404 returned error can't find the container with id 22b3721955f0cab2ef7d8a30a707f5e4518b4e8ffb25abadfe2ab45c099c440c Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.384473 4788 scope.go:117] "RemoveContainer" containerID="a3d6f67b255b7fb9436e5a3bb0f3e023f68f770c52e479275454d3dfb2367255" Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.506839 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d4daf9-430c-410c-a51c-31dabe6cf9b0" path="/var/lib/kubelet/pods/80d4daf9-430c-410c-a51c-31dabe6cf9b0/volumes" Dec 11 09:35:10 crc kubenswrapper[4788]: I1211 09:35:10.592875 4788 scope.go:117] "RemoveContainer" containerID="b7a505b21b6b8686241e415802d4c53e495d4a6aa5f4ebd1df9d322963eede71" Dec 11 09:35:11 crc kubenswrapper[4788]: I1211 09:35:11.114522 4788 generic.go:334] "Generic (PLEG): container finished" podID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerID="30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58" exitCode=0 Dec 11 09:35:11 crc kubenswrapper[4788]: I1211 09:35:11.115122 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerDied","Data":"30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58"} Dec 11 09:35:11 crc kubenswrapper[4788]: I1211 09:35:11.115169 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerStarted","Data":"22b3721955f0cab2ef7d8a30a707f5e4518b4e8ffb25abadfe2ab45c099c440c"} Dec 11 09:35:11 crc kubenswrapper[4788]: I1211 09:35:11.118826 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" event={"ID":"0f1f9ffb-58ec-4282-b3a1-d9040b09023b","Type":"ContainerStarted","Data":"7e9b95918feb24c7c1133eaf7abfc5db7cd35fa2db48aa0efb81e573874afe4b"} Dec 11 09:35:11 crc kubenswrapper[4788]: I1211 09:35:11.158540 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-8dw7z" podStartSLOduration=1.4730286910000001 podStartE2EDuration="7.158509489s" podCreationTimestamp="2025-12-11 09:35:04 +0000 UTC" firstStartedPulling="2025-12-11 09:35:04.913334697 +0000 UTC m=+834.984114283" lastFinishedPulling="2025-12-11 09:35:10.598815495 +0000 UTC m=+840.669595081" observedRunningTime="2025-12-11 09:35:11.153870789 +0000 UTC m=+841.224650375" watchObservedRunningTime="2025-12-11 09:35:11.158509489 +0000 UTC m=+841.229289075" Dec 11 09:35:12 crc kubenswrapper[4788]: I1211 09:35:12.127709 4788 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerStarted","Data":"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d"} Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.136893 4788 generic.go:334] "Generic (PLEG): container finished" podID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerID="d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d" exitCode=0 Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.136948 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerDied","Data":"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d"} Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.772442 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.773777 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.783307 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.784076 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.786134 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.789635 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-nclvb" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.804639 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.826716 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-b6mlg"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.828673 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.844928 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.879150 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-nmstate-lock\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.879560 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r4bx\" (UniqueName: \"kubernetes.io/projected/49a187f1-90cf-4afb-9dec-10bf57b8ff69-kube-api-access-9r4bx\") pod \"nmstate-metrics-7f7f7578db-fprr4\" (UID: \"49a187f1-90cf-4afb-9dec-10bf57b8ff69\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.879712 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sszlz\" (UniqueName: \"kubernetes.io/projected/88b842a1-c94f-4a0a-b845-d5330f12a0a1-kube-api-access-sszlz\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.879844 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-dbus-socket\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.879944 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxqqn\" (UniqueName: \"kubernetes.io/projected/5baf07b9-7c94-4c71-99ee-37b2e68d0437-kube-api-access-vxqqn\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.880036 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-ovs-socket\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.880152 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/5baf07b9-7c94-4c71-99ee-37b2e68d0437-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.923906 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.927146 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.930101 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.930487 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.930609 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-7tgfc" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.942945 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt"] Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982384 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r4bx\" (UniqueName: \"kubernetes.io/projected/49a187f1-90cf-4afb-9dec-10bf57b8ff69-kube-api-access-9r4bx\") pod \"nmstate-metrics-7f7f7578db-fprr4\" (UID: \"49a187f1-90cf-4afb-9dec-10bf57b8ff69\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982867 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r4qv\" (UniqueName: \"kubernetes.io/projected/06c736a4-1288-473a-bceb-0951aced851f-kube-api-access-4r4qv\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982910 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sszlz\" (UniqueName: \"kubernetes.io/projected/88b842a1-c94f-4a0a-b845-d5330f12a0a1-kube-api-access-sszlz\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982938 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-dbus-socket\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982959 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/06c736a4-1288-473a-bceb-0951aced851f-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.982985 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxqqn\" (UniqueName: \"kubernetes.io/projected/5baf07b9-7c94-4c71-99ee-37b2e68d0437-kube-api-access-vxqqn\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983110 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-ovs-socket\") pod 
\"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983180 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/5baf07b9-7c94-4c71-99ee-37b2e68d0437-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983220 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983455 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-nmstate-lock\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983627 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-dbus-socket\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983702 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-ovs-socket\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.983805 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/88b842a1-c94f-4a0a-b845-d5330f12a0a1-nmstate-lock\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:13 crc kubenswrapper[4788]: I1211 09:35:13.995745 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/5baf07b9-7c94-4c71-99ee-37b2e68d0437-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.001033 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxqqn\" (UniqueName: \"kubernetes.io/projected/5baf07b9-7c94-4c71-99ee-37b2e68d0437-kube-api-access-vxqqn\") pod \"nmstate-webhook-f8fb84555-4vbrf\" (UID: \"5baf07b9-7c94-4c71-99ee-37b2e68d0437\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.003544 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r4bx\" (UniqueName: \"kubernetes.io/projected/49a187f1-90cf-4afb-9dec-10bf57b8ff69-kube-api-access-9r4bx\") pod \"nmstate-metrics-7f7f7578db-fprr4\" 
(UID: \"49a187f1-90cf-4afb-9dec-10bf57b8ff69\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.005803 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sszlz\" (UniqueName: \"kubernetes.io/projected/88b842a1-c94f-4a0a-b845-d5330f12a0a1-kube-api-access-sszlz\") pod \"nmstate-handler-b6mlg\" (UID: \"88b842a1-c94f-4a0a-b845-d5330f12a0a1\") " pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.084763 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/06c736a4-1288-473a-bceb-0951aced851f-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.084835 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.084905 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r4qv\" (UniqueName: \"kubernetes.io/projected/06c736a4-1288-473a-bceb-0951aced851f-kube-api-access-4r4qv\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: E1211 09:35:14.085058 4788 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 11 09:35:14 crc kubenswrapper[4788]: E1211 09:35:14.085162 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert podName:06c736a4-1288-473a-bceb-0951aced851f nodeName:}" failed. No retries permitted until 2025-12-11 09:35:14.585130542 +0000 UTC m=+844.655910128 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert") pod "nmstate-console-plugin-6ff7998486-z27wt" (UID: "06c736a4-1288-473a-bceb-0951aced851f") : secret "plugin-serving-cert" not found Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.085753 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/06c736a4-1288-473a-bceb-0951aced851f-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.106771 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.121907 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r4qv\" (UniqueName: \"kubernetes.io/projected/06c736a4-1288-473a-bceb-0951aced851f-kube-api-access-4r4qv\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.124382 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.143395 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-bbcc9b596-ml8xq"] Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.144220 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.145441 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.162241 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bbcc9b596-ml8xq"] Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235175 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-oauth-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235261 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-oauth-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235291 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235318 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j664z\" (UniqueName: \"kubernetes.io/projected/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-kube-api-access-j664z\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235341 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-trusted-ca-bundle\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.235373 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-service-ca\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.239569 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.341611 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342047 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-oauth-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342068 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-oauth-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342089 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342111 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j664z\" (UniqueName: \"kubernetes.io/projected/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-kube-api-access-j664z\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342130 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-trusted-ca-bundle\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.342152 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-service-ca\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.343600 
4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-service-ca\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.345389 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.346198 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-oauth-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.347862 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-trusted-ca-bundle\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.351453 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-oauth-config\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.357465 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-console-serving-cert\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.383033 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j664z\" (UniqueName: \"kubernetes.io/projected/31a41cbd-ce10-4dc4-98a9-c6c6adbd456b-kube-api-access-j664z\") pod \"console-bbcc9b596-ml8xq\" (UID: \"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b\") " pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.545511 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.647945 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.656888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/06c736a4-1288-473a-bceb-0951aced851f-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-z27wt\" (UID: \"06c736a4-1288-473a-bceb-0951aced851f\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.667659 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf"] Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.716205 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4"] Dec 11 09:35:14 crc kubenswrapper[4788]: W1211 09:35:14.730561 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49a187f1_90cf_4afb_9dec_10bf57b8ff69.slice/crio-505006149bc1dcf77dae8b007677cd9a56734946b1534a2b8d327632a804327d WatchSource:0}: Error finding container 505006149bc1dcf77dae8b007677cd9a56734946b1534a2b8d327632a804327d: Status 404 returned error can't find the container with id 505006149bc1dcf77dae8b007677cd9a56734946b1534a2b8d327632a804327d Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.792679 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bbcc9b596-ml8xq"] Dec 11 09:35:14 crc kubenswrapper[4788]: W1211 09:35:14.796805 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31a41cbd_ce10_4dc4_98a9_c6c6adbd456b.slice/crio-445fed7cd57bc79f1df0054dad5a41b089f7d864e3c0a2ef12f180e5ecd9b53c WatchSource:0}: Error finding container 445fed7cd57bc79f1df0054dad5a41b089f7d864e3c0a2ef12f180e5ecd9b53c: Status 404 returned error can't find the container with id 445fed7cd57bc79f1df0054dad5a41b089f7d864e3c0a2ef12f180e5ecd9b53c Dec 11 09:35:14 crc kubenswrapper[4788]: I1211 09:35:14.851564 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.076775 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt"] Dec 11 09:35:15 crc kubenswrapper[4788]: W1211 09:35:15.090278 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06c736a4_1288_473a_bceb_0951aced851f.slice/crio-bf3e989d1a6035b79213e60b7656a1e14e6d7b79e8aef4074567dff4b3eb566b WatchSource:0}: Error finding container bf3e989d1a6035b79213e60b7656a1e14e6d7b79e8aef4074567dff4b3eb566b: Status 404 returned error can't find the container with id bf3e989d1a6035b79213e60b7656a1e14e6d7b79e8aef4074567dff4b3eb566b Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.173870 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-b6mlg" event={"ID":"88b842a1-c94f-4a0a-b845-d5330f12a0a1","Type":"ContainerStarted","Data":"5b3fbca5c8284b2e04a830e9651ad1de243424cda870a5ed3ffd538f6f89bc3c"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.176379 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" event={"ID":"06c736a4-1288-473a-bceb-0951aced851f","Type":"ContainerStarted","Data":"bf3e989d1a6035b79213e60b7656a1e14e6d7b79e8aef4074567dff4b3eb566b"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.178062 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" event={"ID":"5baf07b9-7c94-4c71-99ee-37b2e68d0437","Type":"ContainerStarted","Data":"c0271e6dc4aea75d99a6a362f9c2e4b2e81a04d955bb0a0ab9c8e3ab393006b4"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.180606 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bbcc9b596-ml8xq" event={"ID":"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b","Type":"ContainerStarted","Data":"18183e1a1f158ca5c9facbd5f86bae6dbd90049a2ac00f8324aaddf9b33a9310"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.180840 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bbcc9b596-ml8xq" event={"ID":"31a41cbd-ce10-4dc4-98a9-c6c6adbd456b","Type":"ContainerStarted","Data":"445fed7cd57bc79f1df0054dad5a41b089f7d864e3c0a2ef12f180e5ecd9b53c"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.183470 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" event={"ID":"49a187f1-90cf-4afb-9dec-10bf57b8ff69","Type":"ContainerStarted","Data":"505006149bc1dcf77dae8b007677cd9a56734946b1534a2b8d327632a804327d"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.187416 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerStarted","Data":"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657"} Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.205958 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-bbcc9b596-ml8xq" podStartSLOduration=1.205932716 podStartE2EDuration="1.205932716s" podCreationTimestamp="2025-12-11 09:35:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:35:15.204490409 +0000 UTC 
m=+845.275270035" watchObservedRunningTime="2025-12-11 09:35:15.205932716 +0000 UTC m=+845.276712302" Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.231935 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v7mt9" podStartSLOduration=3.4236259540000002 podStartE2EDuration="6.231916788s" podCreationTimestamp="2025-12-11 09:35:09 +0000 UTC" firstStartedPulling="2025-12-11 09:35:11.118507394 +0000 UTC m=+841.189286980" lastFinishedPulling="2025-12-11 09:35:13.926798228 +0000 UTC m=+843.997577814" observedRunningTime="2025-12-11 09:35:15.229820634 +0000 UTC m=+845.300600220" watchObservedRunningTime="2025-12-11 09:35:15.231916788 +0000 UTC m=+845.302696374" Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.759737 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:15 crc kubenswrapper[4788]: I1211 09:35:15.804083 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:17 crc kubenswrapper[4788]: I1211 09:35:17.585157 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:35:17 crc kubenswrapper[4788]: I1211 09:35:17.585893 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rck9k" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="registry-server" containerID="cri-o://cf473de55ad2068a4f2abfc18c08ff179df394f80dbd07c23f22d7316d4d1718" gracePeriod=2 Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.211536 4788 generic.go:334] "Generic (PLEG): container finished" podID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerID="cf473de55ad2068a4f2abfc18c08ff179df394f80dbd07c23f22d7316d4d1718" exitCode=0 Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.211592 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerDied","Data":"cf473de55ad2068a4f2abfc18c08ff179df394f80dbd07c23f22d7316d4d1718"} Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.406621 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.512463 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rr95\" (UniqueName: \"kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95\") pod \"1711c086-860f-4542-bc2e-2c7b93fd22e1\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.512926 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities\") pod \"1711c086-860f-4542-bc2e-2c7b93fd22e1\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.512977 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content\") pod \"1711c086-860f-4542-bc2e-2c7b93fd22e1\" (UID: \"1711c086-860f-4542-bc2e-2c7b93fd22e1\") " Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.514052 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities" (OuterVolumeSpecName: "utilities") pod "1711c086-860f-4542-bc2e-2c7b93fd22e1" (UID: "1711c086-860f-4542-bc2e-2c7b93fd22e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.514682 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.519931 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95" (OuterVolumeSpecName: "kube-api-access-4rr95") pod "1711c086-860f-4542-bc2e-2c7b93fd22e1" (UID: "1711c086-860f-4542-bc2e-2c7b93fd22e1"). InnerVolumeSpecName "kube-api-access-4rr95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.619301 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rr95\" (UniqueName: \"kubernetes.io/projected/1711c086-860f-4542-bc2e-2c7b93fd22e1-kube-api-access-4rr95\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.625263 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1711c086-860f-4542-bc2e-2c7b93fd22e1" (UID: "1711c086-860f-4542-bc2e-2c7b93fd22e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:18 crc kubenswrapper[4788]: I1211 09:35:18.720567 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1711c086-860f-4542-bc2e-2c7b93fd22e1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.221135 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" event={"ID":"49a187f1-90cf-4afb-9dec-10bf57b8ff69","Type":"ContainerStarted","Data":"15fb794ee8c9eda99729e8db6c8da9a6b9d43f5f8550094c85e6bb3ef82ea4e8"} Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.222794 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-b6mlg" event={"ID":"88b842a1-c94f-4a0a-b845-d5330f12a0a1","Type":"ContainerStarted","Data":"20c039c905b6595912dce0100e6a9cf51dde37c6a51f8eb67f8720ac10bbd500"} Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.222921 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.224074 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" event={"ID":"06c736a4-1288-473a-bceb-0951aced851f","Type":"ContainerStarted","Data":"3d165094e2ff7265413f9e6acf5d6eb0a4a048e2161f2ae1ce0f8603057d3ac4"} Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.226193 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" event={"ID":"5baf07b9-7c94-4c71-99ee-37b2e68d0437","Type":"ContainerStarted","Data":"6d653c37aad08790fa80e62ec8d1030bba3ae0584cdbb406dd87408128f1432a"} Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.226347 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.229814 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rck9k" event={"ID":"1711c086-860f-4542-bc2e-2c7b93fd22e1","Type":"ContainerDied","Data":"11edab9d3adad89eadc832b44a5e2093e2152fe6973f9de835b43b3b9587bef0"} Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.229854 4788 scope.go:117] "RemoveContainer" containerID="cf473de55ad2068a4f2abfc18c08ff179df394f80dbd07c23f22d7316d4d1718" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.229888 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rck9k" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.249984 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-b6mlg" podStartSLOduration=2.342347472 podStartE2EDuration="6.249955267s" podCreationTimestamp="2025-12-11 09:35:13 +0000 UTC" firstStartedPulling="2025-12-11 09:35:14.296421916 +0000 UTC m=+844.367201512" lastFinishedPulling="2025-12-11 09:35:18.204029721 +0000 UTC m=+848.274809307" observedRunningTime="2025-12-11 09:35:19.242464744 +0000 UTC m=+849.313244340" watchObservedRunningTime="2025-12-11 09:35:19.249955267 +0000 UTC m=+849.320734873" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.254480 4788 scope.go:117] "RemoveContainer" containerID="ef12b0044becdd53db36644f6c82202c65902a1bd46afb7a6d0487b76cec82c3" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.266676 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" podStartSLOduration=2.76990339 podStartE2EDuration="6.266649255s" podCreationTimestamp="2025-12-11 09:35:13 +0000 UTC" firstStartedPulling="2025-12-11 09:35:14.677520081 +0000 UTC m=+844.748299657" lastFinishedPulling="2025-12-11 09:35:18.174265916 +0000 UTC m=+848.245045522" observedRunningTime="2025-12-11 09:35:19.26486184 +0000 UTC m=+849.335641426" watchObservedRunningTime="2025-12-11 09:35:19.266649255 +0000 UTC m=+849.337428841" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.285213 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-z27wt" podStartSLOduration=3.203858531 podStartE2EDuration="6.285149301s" podCreationTimestamp="2025-12-11 09:35:13 +0000 UTC" firstStartedPulling="2025-12-11 09:35:15.094164776 +0000 UTC m=+845.164944362" lastFinishedPulling="2025-12-11 09:35:18.175455536 +0000 UTC m=+848.246235132" observedRunningTime="2025-12-11 09:35:19.284744701 +0000 UTC m=+849.355524287" watchObservedRunningTime="2025-12-11 09:35:19.285149301 +0000 UTC m=+849.355928887" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.316769 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.319634 4788 scope.go:117] "RemoveContainer" containerID="b33cad0e75ae0175cd78463cfed77488933a5906053aa67a8d7628c37fa77cb9" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.320069 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rck9k"] Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.582634 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.582794 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:19 crc kubenswrapper[4788]: I1211 09:35:19.638116 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:20 crc kubenswrapper[4788]: I1211 09:35:20.285728 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:20 crc kubenswrapper[4788]: I1211 09:35:20.513072 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" path="/var/lib/kubelet/pods/1711c086-860f-4542-bc2e-2c7b93fd22e1/volumes" Dec 11 09:35:21 crc kubenswrapper[4788]: I1211 09:35:21.987310 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:22 crc kubenswrapper[4788]: I1211 09:35:22.253018 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" event={"ID":"49a187f1-90cf-4afb-9dec-10bf57b8ff69","Type":"ContainerStarted","Data":"f22055d3404852ad787486b0439f981125c4c8d7ca60b39379cc871a9cf32ac2"} Dec 11 09:35:22 crc kubenswrapper[4788]: I1211 09:35:22.277549 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fprr4" podStartSLOduration=2.101878825 podStartE2EDuration="9.277521611s" podCreationTimestamp="2025-12-11 09:35:13 +0000 UTC" firstStartedPulling="2025-12-11 09:35:14.732902454 +0000 UTC m=+844.803682040" lastFinishedPulling="2025-12-11 09:35:21.90854524 +0000 UTC m=+851.979324826" observedRunningTime="2025-12-11 09:35:22.270735007 +0000 UTC m=+852.341514613" watchObservedRunningTime="2025-12-11 09:35:22.277521611 +0000 UTC m=+852.348301197" Dec 11 09:35:23 crc kubenswrapper[4788]: I1211 09:35:23.258794 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v7mt9" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="registry-server" containerID="cri-o://4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657" gracePeriod=2 Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.175154 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.179796 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-b6mlg" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.267696 4788 generic.go:334] "Generic (PLEG): container finished" podID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerID="4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657" exitCode=0 Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.267754 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerDied","Data":"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657"} Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.267796 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v7mt9" event={"ID":"8cc1af01-3720-4f6d-873c-bc1aded7421a","Type":"ContainerDied","Data":"22b3721955f0cab2ef7d8a30a707f5e4518b4e8ffb25abadfe2ab45c099c440c"} Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.267827 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v7mt9" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.267836 4788 scope.go:117] "RemoveContainer" containerID="4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.281441 4788 scope.go:117] "RemoveContainer" containerID="d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.296094 4788 scope.go:117] "RemoveContainer" containerID="30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.306297 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities\") pod \"8cc1af01-3720-4f6d-873c-bc1aded7421a\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.306602 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ddz7\" (UniqueName: \"kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7\") pod \"8cc1af01-3720-4f6d-873c-bc1aded7421a\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.307550 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities" (OuterVolumeSpecName: "utilities") pod "8cc1af01-3720-4f6d-873c-bc1aded7421a" (UID: "8cc1af01-3720-4f6d-873c-bc1aded7421a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.308080 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content\") pod \"8cc1af01-3720-4f6d-873c-bc1aded7421a\" (UID: \"8cc1af01-3720-4f6d-873c-bc1aded7421a\") " Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.308557 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.313159 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7" (OuterVolumeSpecName: "kube-api-access-8ddz7") pod "8cc1af01-3720-4f6d-873c-bc1aded7421a" (UID: "8cc1af01-3720-4f6d-873c-bc1aded7421a"). InnerVolumeSpecName "kube-api-access-8ddz7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.318634 4788 scope.go:117] "RemoveContainer" containerID="4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657" Dec 11 09:35:24 crc kubenswrapper[4788]: E1211 09:35:24.319445 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657\": container with ID starting with 4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657 not found: ID does not exist" containerID="4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.319489 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657"} err="failed to get container status \"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657\": rpc error: code = NotFound desc = could not find container \"4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657\": container with ID starting with 4e36a27da548d8eacaab31d082a000aab86e9c54b45b8abd56550f32a40ad657 not found: ID does not exist" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.319524 4788 scope.go:117] "RemoveContainer" containerID="d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d" Dec 11 09:35:24 crc kubenswrapper[4788]: E1211 09:35:24.319912 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d\": container with ID starting with d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d not found: ID does not exist" containerID="d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.319946 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d"} err="failed to get container status \"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d\": rpc error: code = NotFound desc = could not find container \"d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d\": container with ID starting with d1300cb3bed6c7dd05e1af2a3d55a6b6a79f547fbcca5e4d68b20caeffe76e8d not found: ID does not exist" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.319964 4788 scope.go:117] "RemoveContainer" containerID="30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58" Dec 11 09:35:24 crc kubenswrapper[4788]: E1211 09:35:24.320442 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58\": container with ID starting with 30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58 not found: ID does not exist" containerID="30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.320473 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58"} err="failed to get container status \"30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58\": rpc error: code = NotFound desc = could not 
find container \"30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58\": container with ID starting with 30cf3c562159e6db89ae5e8c62ffe8f672737e45045ff4ae61d2529c30bb0b58 not found: ID does not exist" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.359398 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cc1af01-3720-4f6d-873c-bc1aded7421a" (UID: "8cc1af01-3720-4f6d-873c-bc1aded7421a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.410047 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ddz7\" (UniqueName: \"kubernetes.io/projected/8cc1af01-3720-4f6d-873c-bc1aded7421a-kube-api-access-8ddz7\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.410111 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc1af01-3720-4f6d-873c-bc1aded7421a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.546617 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.546689 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.552562 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.608247 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:24 crc kubenswrapper[4788]: I1211 09:35:24.612081 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v7mt9"] Dec 11 09:35:25 crc kubenswrapper[4788]: I1211 09:35:25.279986 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-bbcc9b596-ml8xq" Dec 11 09:35:25 crc kubenswrapper[4788]: I1211 09:35:25.340565 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:35:26 crc kubenswrapper[4788]: I1211 09:35:26.507555 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" path="/var/lib/kubelet/pods/8cc1af01-3720-4f6d-873c-bc1aded7421a/volumes" Dec 11 09:35:34 crc kubenswrapper[4788]: I1211 09:35:34.130932 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-4vbrf" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.008559 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009698 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="extract-content" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009749 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="extract-content" Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009773 4788 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="extract-content" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009783 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="extract-content" Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009818 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="extract-utilities" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009832 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="extract-utilities" Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009846 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="extract-utilities" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009855 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="extract-utilities" Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009898 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009909 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: E1211 09:35:38.009919 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.009927 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.010249 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc1af01-3720-4f6d-873c-bc1aded7421a" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.010267 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1711c086-860f-4542-bc2e-2c7b93fd22e1" containerName="registry-server" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.011873 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.015847 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.122248 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh9d7\" (UniqueName: \"kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.122341 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.122529 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.223501 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.223649 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh9d7\" (UniqueName: \"kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.223685 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.224354 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.225490 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.254879 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wh9d7\" (UniqueName: \"kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7\") pod \"redhat-marketplace-6dz96\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.339660 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:38 crc kubenswrapper[4788]: I1211 09:35:38.588732 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:35:38 crc kubenswrapper[4788]: W1211 09:35:38.608733 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50681771_96d6_4bd4_ac86_907c2af57451.slice/crio-a9fb4f4e6f3bee6cb230c829a9251a7cef7dfcbf17827447eee547111109391b WatchSource:0}: Error finding container a9fb4f4e6f3bee6cb230c829a9251a7cef7dfcbf17827447eee547111109391b: Status 404 returned error can't find the container with id a9fb4f4e6f3bee6cb230c829a9251a7cef7dfcbf17827447eee547111109391b Dec 11 09:35:39 crc kubenswrapper[4788]: I1211 09:35:39.375836 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerStarted","Data":"a9fb4f4e6f3bee6cb230c829a9251a7cef7dfcbf17827447eee547111109391b"} Dec 11 09:35:40 crc kubenswrapper[4788]: I1211 09:35:40.385629 4788 generic.go:334] "Generic (PLEG): container finished" podID="50681771-96d6-4bd4-ac86-907c2af57451" containerID="1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2" exitCode=0 Dec 11 09:35:40 crc kubenswrapper[4788]: I1211 09:35:40.385700 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerDied","Data":"1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2"} Dec 11 09:35:41 crc kubenswrapper[4788]: I1211 09:35:41.396038 4788 generic.go:334] "Generic (PLEG): container finished" podID="50681771-96d6-4bd4-ac86-907c2af57451" containerID="eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee" exitCode=0 Dec 11 09:35:41 crc kubenswrapper[4788]: I1211 09:35:41.396124 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerDied","Data":"eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee"} Dec 11 09:35:44 crc kubenswrapper[4788]: I1211 09:35:44.423061 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerStarted","Data":"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c"} Dec 11 09:35:44 crc kubenswrapper[4788]: I1211 09:35:44.448753 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6dz96" podStartSLOduration=4.116402228 podStartE2EDuration="7.448731542s" podCreationTimestamp="2025-12-11 09:35:37 +0000 UTC" firstStartedPulling="2025-12-11 09:35:40.388018662 +0000 UTC m=+870.458798238" lastFinishedPulling="2025-12-11 09:35:43.720347966 +0000 UTC m=+873.791127552" observedRunningTime="2025-12-11 09:35:44.447104631 +0000 UTC m=+874.517884227" 
watchObservedRunningTime="2025-12-11 09:35:44.448731542 +0000 UTC m=+874.519511128" Dec 11 09:35:48 crc kubenswrapper[4788]: I1211 09:35:48.340267 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:48 crc kubenswrapper[4788]: I1211 09:35:48.341026 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:48 crc kubenswrapper[4788]: I1211 09:35:48.390224 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:35:49 crc kubenswrapper[4788]: I1211 09:35:49.907835 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd"] Dec 11 09:35:49 crc kubenswrapper[4788]: I1211 09:35:49.909607 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:49 crc kubenswrapper[4788]: I1211 09:35:49.912422 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 09:35:49 crc kubenswrapper[4788]: I1211 09:35:49.918596 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd"] Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.045399 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.045597 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htb8t\" (UniqueName: \"kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.045767 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.148057 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htb8t\" (UniqueName: \"kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.148146 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.148217 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.148867 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.148985 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.171736 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htb8t\" (UniqueName: \"kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.226770 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.387795 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-jdvlj" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" containerID="cri-o://3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7" gracePeriod=15 Dec 11 09:35:50 crc kubenswrapper[4788]: I1211 09:35:50.477250 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd"] Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.259605 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-jdvlj_2d7342e7-facb-49b4-adee-0e6e25c9fa8e/console/0.log" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.260034 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366091 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366205 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366251 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366297 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366322 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw28p\" (UniqueName: \"kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366351 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.366371 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca\") pod \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\" (UID: \"2d7342e7-facb-49b4-adee-0e6e25c9fa8e\") " Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.367272 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config" (OuterVolumeSpecName: "console-config") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.369644 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.370167 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.370761 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.370997 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.371026 4788 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.371105 4788 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.371118 4788 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.371763 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca" (OuterVolumeSpecName: "service-ca") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.375761 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.376308 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.376705 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p" (OuterVolumeSpecName: "kube-api-access-mw28p") pod "2d7342e7-facb-49b4-adee-0e6e25c9fa8e" (UID: "2d7342e7-facb-49b4-adee-0e6e25c9fa8e"). InnerVolumeSpecName "kube-api-access-mw28p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.472223 4788 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.472290 4788 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.472308 4788 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.472320 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw28p\" (UniqueName: \"kubernetes.io/projected/2d7342e7-facb-49b4-adee-0e6e25c9fa8e-kube-api-access-mw28p\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.479975 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-jdvlj_2d7342e7-facb-49b4-adee-0e6e25c9fa8e/console/0.log" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.480256 4788 generic.go:334] "Generic (PLEG): container finished" podID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerID="3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7" exitCode=2 Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.480389 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jdvlj" event={"ID":"2d7342e7-facb-49b4-adee-0e6e25c9fa8e","Type":"ContainerDied","Data":"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7"} Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.480485 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jdvlj" event={"ID":"2d7342e7-facb-49b4-adee-0e6e25c9fa8e","Type":"ContainerDied","Data":"2d967030b3c8c9b46c87e3ddac89586e700c60edad3cf24e71ed01c8e6cc41e4"} Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.480496 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-jdvlj" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.480542 4788 scope.go:117] "RemoveContainer" containerID="3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.483320 4788 generic.go:334] "Generic (PLEG): container finished" podID="72e89d4d-8a18-4508-9e63-096103af0b70" containerID="60b7310d4c7ab847db98c005c96c2a3c3abec78de63a3709206188308ff79320" exitCode=0 Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.483375 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" event={"ID":"72e89d4d-8a18-4508-9e63-096103af0b70","Type":"ContainerDied","Data":"60b7310d4c7ab847db98c005c96c2a3c3abec78de63a3709206188308ff79320"} Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.484312 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" event={"ID":"72e89d4d-8a18-4508-9e63-096103af0b70","Type":"ContainerStarted","Data":"ec47d1e21b080a73dc7c381caa2e6a6d1c5539d8ae60085b9d79b25445ce9897"} Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.509908 4788 scope.go:117] "RemoveContainer" containerID="3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7" Dec 11 09:35:51 crc kubenswrapper[4788]: E1211 09:35:51.511609 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7\": container with ID starting with 3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7 not found: ID does not exist" containerID="3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.511708 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7"} err="failed to get container status \"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7\": rpc error: code = NotFound desc = could not find container \"3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7\": container with ID starting with 3ab1e1e1eac3aba29343eb04c43513a0937763e58c856e29d9f9c2d4a65ddad7 not found: ID does not exist" Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.523341 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:35:51 crc kubenswrapper[4788]: I1211 09:35:51.531657 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-jdvlj"] Dec 11 09:35:52 crc kubenswrapper[4788]: I1211 09:35:52.503311 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" path="/var/lib/kubelet/pods/2d7342e7-facb-49b4-adee-0e6e25c9fa8e/volumes" Dec 11 09:35:53 crc kubenswrapper[4788]: I1211 09:35:53.500153 4788 generic.go:334] "Generic (PLEG): container finished" podID="72e89d4d-8a18-4508-9e63-096103af0b70" containerID="c9303f0dfc1549863a28de1e943fcb946109cbe8ef2d84a9ab9ee26838b3c37d" exitCode=0 Dec 11 09:35:53 crc kubenswrapper[4788]: I1211 09:35:53.500279 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" 
event={"ID":"72e89d4d-8a18-4508-9e63-096103af0b70","Type":"ContainerDied","Data":"c9303f0dfc1549863a28de1e943fcb946109cbe8ef2d84a9ab9ee26838b3c37d"} Dec 11 09:35:54 crc kubenswrapper[4788]: I1211 09:35:54.507535 4788 generic.go:334] "Generic (PLEG): container finished" podID="72e89d4d-8a18-4508-9e63-096103af0b70" containerID="ced5a8c8c493683bc3b8e3474bb90a1cce98255c114ec13b9bc29a852d292c4f" exitCode=0 Dec 11 09:35:54 crc kubenswrapper[4788]: I1211 09:35:54.507592 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" event={"ID":"72e89d4d-8a18-4508-9e63-096103af0b70","Type":"ContainerDied","Data":"ced5a8c8c493683bc3b8e3474bb90a1cce98255c114ec13b9bc29a852d292c4f"} Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.828099 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.944013 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle\") pod \"72e89d4d-8a18-4508-9e63-096103af0b70\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.944307 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htb8t\" (UniqueName: \"kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t\") pod \"72e89d4d-8a18-4508-9e63-096103af0b70\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.944341 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util\") pod \"72e89d4d-8a18-4508-9e63-096103af0b70\" (UID: \"72e89d4d-8a18-4508-9e63-096103af0b70\") " Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.945835 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle" (OuterVolumeSpecName: "bundle") pod "72e89d4d-8a18-4508-9e63-096103af0b70" (UID: "72e89d4d-8a18-4508-9e63-096103af0b70"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.952056 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t" (OuterVolumeSpecName: "kube-api-access-htb8t") pod "72e89d4d-8a18-4508-9e63-096103af0b70" (UID: "72e89d4d-8a18-4508-9e63-096103af0b70"). InnerVolumeSpecName "kube-api-access-htb8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:35:55 crc kubenswrapper[4788]: I1211 09:35:55.959376 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util" (OuterVolumeSpecName: "util") pod "72e89d4d-8a18-4508-9e63-096103af0b70" (UID: "72e89d4d-8a18-4508-9e63-096103af0b70"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.047223 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htb8t\" (UniqueName: \"kubernetes.io/projected/72e89d4d-8a18-4508-9e63-096103af0b70-kube-api-access-htb8t\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.047297 4788 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-util\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.047315 4788 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/72e89d4d-8a18-4508-9e63-096103af0b70-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.522857 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" event={"ID":"72e89d4d-8a18-4508-9e63-096103af0b70","Type":"ContainerDied","Data":"ec47d1e21b080a73dc7c381caa2e6a6d1c5539d8ae60085b9d79b25445ce9897"} Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.522934 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec47d1e21b080a73dc7c381caa2e6a6d1c5539d8ae60085b9d79b25445ce9897" Dec 11 09:35:56 crc kubenswrapper[4788]: I1211 09:35:56.522901 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd" Dec 11 09:35:58 crc kubenswrapper[4788]: I1211 09:35:58.390351 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:36:00 crc kubenswrapper[4788]: I1211 09:36:00.455826 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:36:00 crc kubenswrapper[4788]: I1211 09:36:00.456194 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6dz96" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="registry-server" containerID="cri-o://4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c" gracePeriod=2 Dec 11 09:36:00 crc kubenswrapper[4788]: I1211 09:36:00.919793 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.021742 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities\") pod \"50681771-96d6-4bd4-ac86-907c2af57451\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.021836 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content\") pod \"50681771-96d6-4bd4-ac86-907c2af57451\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.021871 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh9d7\" (UniqueName: \"kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7\") pod \"50681771-96d6-4bd4-ac86-907c2af57451\" (UID: \"50681771-96d6-4bd4-ac86-907c2af57451\") " Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.023314 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities" (OuterVolumeSpecName: "utilities") pod "50681771-96d6-4bd4-ac86-907c2af57451" (UID: "50681771-96d6-4bd4-ac86-907c2af57451"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.037474 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7" (OuterVolumeSpecName: "kube-api-access-wh9d7") pod "50681771-96d6-4bd4-ac86-907c2af57451" (UID: "50681771-96d6-4bd4-ac86-907c2af57451"). InnerVolumeSpecName "kube-api-access-wh9d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.053668 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "50681771-96d6-4bd4-ac86-907c2af57451" (UID: "50681771-96d6-4bd4-ac86-907c2af57451"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.123265 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.123316 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh9d7\" (UniqueName: \"kubernetes.io/projected/50681771-96d6-4bd4-ac86-907c2af57451-kube-api-access-wh9d7\") on node \"crc\" DevicePath \"\"" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.123330 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50681771-96d6-4bd4-ac86-907c2af57451-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.559761 4788 generic.go:334] "Generic (PLEG): container finished" podID="50681771-96d6-4bd4-ac86-907c2af57451" containerID="4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c" exitCode=0 Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.559815 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerDied","Data":"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c"} Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.559853 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6dz96" event={"ID":"50681771-96d6-4bd4-ac86-907c2af57451","Type":"ContainerDied","Data":"a9fb4f4e6f3bee6cb230c829a9251a7cef7dfcbf17827447eee547111109391b"} Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.559875 4788 scope.go:117] "RemoveContainer" containerID="4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.559903 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6dz96" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.587693 4788 scope.go:117] "RemoveContainer" containerID="eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.594378 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.605328 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6dz96"] Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.626387 4788 scope.go:117] "RemoveContainer" containerID="1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.651596 4788 scope.go:117] "RemoveContainer" containerID="4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c" Dec 11 09:36:01 crc kubenswrapper[4788]: E1211 09:36:01.652066 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c\": container with ID starting with 4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c not found: ID does not exist" containerID="4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.652113 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c"} err="failed to get container status \"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c\": rpc error: code = NotFound desc = could not find container \"4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c\": container with ID starting with 4a4c856878f837e598db852a8b3d44d56abead3e28153945cbf379aa5be5343c not found: ID does not exist" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.652138 4788 scope.go:117] "RemoveContainer" containerID="eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee" Dec 11 09:36:01 crc kubenswrapper[4788]: E1211 09:36:01.652505 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee\": container with ID starting with eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee not found: ID does not exist" containerID="eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.652530 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee"} err="failed to get container status \"eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee\": rpc error: code = NotFound desc = could not find container \"eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee\": container with ID starting with eebf13febb9d6d376690742c2c330b5f7a7306b92ef3f605987d0a371f18a1ee not found: ID does not exist" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.652542 4788 scope.go:117] "RemoveContainer" containerID="1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2" Dec 11 09:36:01 crc kubenswrapper[4788]: E1211 09:36:01.652849 4788 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2\": container with ID starting with 1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2 not found: ID does not exist" containerID="1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2" Dec 11 09:36:01 crc kubenswrapper[4788]: I1211 09:36:01.652870 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2"} err="failed to get container status \"1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2\": rpc error: code = NotFound desc = could not find container \"1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2\": container with ID starting with 1fe68ea0620c85322db8ed1e2491d20e4b7655d09a4c2d8c018e4c72668678d2 not found: ID does not exist" Dec 11 09:36:02 crc kubenswrapper[4788]: I1211 09:36:02.505181 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50681771-96d6-4bd4-ac86-907c2af57451" path="/var/lib/kubelet/pods/50681771-96d6-4bd4-ac86-907c2af57451/volumes" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.958966 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp"] Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959351 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="extract" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959369 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="extract" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959384 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="extract-content" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959391 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="extract-content" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959405 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="registry-server" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959413 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="registry-server" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959422 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="util" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959429 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="util" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959439 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959447 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959457 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="pull" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 
09:36:03.959464 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="pull" Dec 11 09:36:03 crc kubenswrapper[4788]: E1211 09:36:03.959477 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="extract-utilities" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959488 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="extract-utilities" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959640 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="72e89d4d-8a18-4508-9e63-096103af0b70" containerName="extract" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959662 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7342e7-facb-49b4-adee-0e6e25c9fa8e" containerName="console" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.959674 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="50681771-96d6-4bd4-ac86-907c2af57451" containerName="registry-server" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.960204 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.964741 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.965764 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-p97qs" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.966041 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.966349 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.966625 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 11 09:36:03 crc kubenswrapper[4788]: I1211 09:36:03.977634 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp"] Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.065162 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8ngf\" (UniqueName: \"kubernetes.io/projected/d17f23e5-47b0-4b87-ab5c-32ac870eb738-kube-api-access-c8ngf\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.065256 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-apiservice-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.065563 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-webhook-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.166641 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-webhook-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.167167 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8ngf\" (UniqueName: \"kubernetes.io/projected/d17f23e5-47b0-4b87-ab5c-32ac870eb738-kube-api-access-c8ngf\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.167203 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-apiservice-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.174059 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-webhook-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.188957 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d17f23e5-47b0-4b87-ab5c-32ac870eb738-apiservice-cert\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.193379 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8ngf\" (UniqueName: \"kubernetes.io/projected/d17f23e5-47b0-4b87-ab5c-32ac870eb738-kube-api-access-c8ngf\") pod \"metallb-operator-controller-manager-64ccd66d65-8ldsp\" (UID: \"d17f23e5-47b0-4b87-ab5c-32ac870eb738\") " pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.279519 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.306220 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf"] Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.307449 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.319771 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-wlqbq" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.320056 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.320181 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.337002 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf"] Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.472211 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-apiservice-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.472837 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-webhook-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.472866 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7j8z\" (UniqueName: \"kubernetes.io/projected/2fec7bb6-d596-4da7-94d0-567cb78c94b2-kube-api-access-j7j8z\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.573985 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-apiservice-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.574080 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-webhook-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.574123 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7j8z\" (UniqueName: \"kubernetes.io/projected/2fec7bb6-d596-4da7-94d0-567cb78c94b2-kube-api-access-j7j8z\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 
09:36:04.584254 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-webhook-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.584647 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2fec7bb6-d596-4da7-94d0-567cb78c94b2-apiservice-cert\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.616416 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7j8z\" (UniqueName: \"kubernetes.io/projected/2fec7bb6-d596-4da7-94d0-567cb78c94b2-kube-api-access-j7j8z\") pod \"metallb-operator-webhook-server-57bc7764b6-mf4wf\" (UID: \"2fec7bb6-d596-4da7-94d0-567cb78c94b2\") " pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.649538 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp"] Dec 11 09:36:04 crc kubenswrapper[4788]: I1211 09:36:04.674816 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:05 crc kubenswrapper[4788]: I1211 09:36:05.133555 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf"] Dec 11 09:36:05 crc kubenswrapper[4788]: W1211 09:36:05.148465 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fec7bb6_d596_4da7_94d0_567cb78c94b2.slice/crio-3f9ae4b26f4c97e7da3dd5d6f99754b52c177e30ef9eeee3bf9ee51c590d0d40 WatchSource:0}: Error finding container 3f9ae4b26f4c97e7da3dd5d6f99754b52c177e30ef9eeee3bf9ee51c590d0d40: Status 404 returned error can't find the container with id 3f9ae4b26f4c97e7da3dd5d6f99754b52c177e30ef9eeee3bf9ee51c590d0d40 Dec 11 09:36:05 crc kubenswrapper[4788]: I1211 09:36:05.597072 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" event={"ID":"d17f23e5-47b0-4b87-ab5c-32ac870eb738","Type":"ContainerStarted","Data":"e9bcc32f5a7e02fe59f2a8a1f20db7c2410288fced50005bfda8c13d7ec191fb"} Dec 11 09:36:05 crc kubenswrapper[4788]: I1211 09:36:05.598085 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" event={"ID":"2fec7bb6-d596-4da7-94d0-567cb78c94b2","Type":"ContainerStarted","Data":"3f9ae4b26f4c97e7da3dd5d6f99754b52c177e30ef9eeee3bf9ee51c590d0d40"} Dec 11 09:36:08 crc kubenswrapper[4788]: I1211 09:36:08.623281 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" event={"ID":"d17f23e5-47b0-4b87-ab5c-32ac870eb738","Type":"ContainerStarted","Data":"7e353b01f1c27fd043c42337967785be728b4839c665bcbc9f7420cac618df71"} Dec 11 09:36:08 crc kubenswrapper[4788]: I1211 09:36:08.623452 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:08 crc kubenswrapper[4788]: I1211 09:36:08.645564 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" podStartSLOduration=2.776858379 podStartE2EDuration="5.645543762s" podCreationTimestamp="2025-12-11 09:36:03 +0000 UTC" firstStartedPulling="2025-12-11 09:36:04.669332221 +0000 UTC m=+894.740111807" lastFinishedPulling="2025-12-11 09:36:07.538017594 +0000 UTC m=+897.608797190" observedRunningTime="2025-12-11 09:36:08.64468514 +0000 UTC m=+898.715464746" watchObservedRunningTime="2025-12-11 09:36:08.645543762 +0000 UTC m=+898.716323348" Dec 11 09:36:11 crc kubenswrapper[4788]: I1211 09:36:11.644299 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" event={"ID":"2fec7bb6-d596-4da7-94d0-567cb78c94b2","Type":"ContainerStarted","Data":"b00331750c47113974d12263327d7d9bab8a851445b5d1b0705c1af8a6996a61"} Dec 11 09:36:11 crc kubenswrapper[4788]: I1211 09:36:11.645600 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:11 crc kubenswrapper[4788]: I1211 09:36:11.668583 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" podStartSLOduration=1.71544351 podStartE2EDuration="7.66855997s" podCreationTimestamp="2025-12-11 09:36:04 +0000 UTC" firstStartedPulling="2025-12-11 09:36:05.152968608 +0000 UTC m=+895.223748194" lastFinishedPulling="2025-12-11 09:36:11.106085068 +0000 UTC m=+901.176864654" observedRunningTime="2025-12-11 09:36:11.664068095 +0000 UTC m=+901.734847691" watchObservedRunningTime="2025-12-11 09:36:11.66855997 +0000 UTC m=+901.739339556" Dec 11 09:36:21 crc kubenswrapper[4788]: I1211 09:36:21.369124 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:36:21 crc kubenswrapper[4788]: I1211 09:36:21.369632 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:36:24 crc kubenswrapper[4788]: I1211 09:36:24.706481 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-57bc7764b6-mf4wf" Dec 11 09:36:44 crc kubenswrapper[4788]: I1211 09:36:44.283731 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-64ccd66d65-8ldsp" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.044353 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-thk86"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.046935 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.049358 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.049440 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.049377 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-kqn9k" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.050464 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.052438 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054099 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-metrics\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054143 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-reloader\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054209 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-sockets\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054442 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/20680b9d-91a0-4194-a35b-a25c14869938-frr-startup\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054515 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054549 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxtmx\" (UniqueName: \"kubernetes.io/projected/20680b9d-91a0-4194-a35b-a25c14869938-kube-api-access-cxtmx\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.054571 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-conf\") pod \"frr-k8s-thk86\" (UID: 
\"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.055530 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.061930 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.154473 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-wfdvw"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.155468 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156043 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-sockets\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156109 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/20680b9d-91a0-4194-a35b-a25c14869938-frr-startup\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156140 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156162 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxtmx\" (UniqueName: \"kubernetes.io/projected/20680b9d-91a0-4194-a35b-a25c14869938-kube-api-access-cxtmx\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156183 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-conf\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156219 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdn5n\" (UniqueName: \"kubernetes.io/projected/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-kube-api-access-fdn5n\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156257 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-metrics\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156276 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: 
\"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-reloader\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156308 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.156592 4788 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.156672 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs podName:20680b9d-91a0-4194-a35b-a25c14869938 nodeName:}" failed. No retries permitted until 2025-12-11 09:36:45.656642794 +0000 UTC m=+935.727422380 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs") pod "frr-k8s-thk86" (UID: "20680b9d-91a0-4194-a35b-a25c14869938") : secret "frr-k8s-certs-secret" not found Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156671 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-sockets\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156743 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-metrics\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156770 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-frr-conf\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.156976 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/20680b9d-91a0-4194-a35b-a25c14869938-reloader\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.157542 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/20680b9d-91a0-4194-a35b-a25c14869938-frr-startup\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.168963 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.169937 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.170431 4788 
reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-rggtp" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.170708 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.193193 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-82svd"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.194834 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.199480 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.201060 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxtmx\" (UniqueName: \"kubernetes.io/projected/20680b9d-91a0-4194-a35b-a25c14869938-kube-api-access-cxtmx\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.217053 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-82svd"] Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260086 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-cert\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260144 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdn5n\" (UniqueName: \"kubernetes.io/projected/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-kube-api-access-fdn5n\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260167 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metallb-excludel2\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260197 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260247 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260270 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-metrics-certs\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260288 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhjkq\" (UniqueName: \"kubernetes.io/projected/9edef12f-0a1f-45ad-8850-0d2edfc5384c-kube-api-access-hhjkq\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260307 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.260344 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jsnf\" (UniqueName: \"kubernetes.io/projected/ac39e555-61a7-48d2-976e-e3a095bae216-kube-api-access-2jsnf\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.266030 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.297258 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdn5n\" (UniqueName: \"kubernetes.io/projected/1ed60fe2-ac5e-46b3-a0f1-05436db532bb-kube-api-access-fdn5n\") pod \"frr-k8s-webhook-server-7784b6fcf-fc5pw\" (UID: \"1ed60fe2-ac5e-46b3-a0f1-05436db532bb\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361516 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jsnf\" (UniqueName: \"kubernetes.io/projected/ac39e555-61a7-48d2-976e-e3a095bae216-kube-api-access-2jsnf\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361589 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-cert\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361617 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metallb-excludel2\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361670 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361699 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhjkq\" (UniqueName: \"kubernetes.io/projected/9edef12f-0a1f-45ad-8850-0d2edfc5384c-kube-api-access-hhjkq\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361714 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-metrics-certs\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.361734 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.361844 4788 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.361850 4788 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.361901 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs podName:9edef12f-0a1f-45ad-8850-0d2edfc5384c nodeName:}" failed. No retries permitted until 2025-12-11 09:36:45.861882948 +0000 UTC m=+935.932662534 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs") pod "speaker-wfdvw" (UID: "9edef12f-0a1f-45ad-8850-0d2edfc5384c") : secret "speaker-certs-secret" not found Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.361915 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist podName:9edef12f-0a1f-45ad-8850-0d2edfc5384c nodeName:}" failed. No retries permitted until 2025-12-11 09:36:45.861910009 +0000 UTC m=+935.932689595 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist") pod "speaker-wfdvw" (UID: "9edef12f-0a1f-45ad-8850-0d2edfc5384c") : secret "metallb-memberlist" not found Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.362652 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metallb-excludel2\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.366068 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-metrics-certs\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.370498 4788 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.377070 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ac39e555-61a7-48d2-976e-e3a095bae216-cert\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.380083 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.381877 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jsnf\" (UniqueName: \"kubernetes.io/projected/ac39e555-61a7-48d2-976e-e3a095bae216-kube-api-access-2jsnf\") pod \"controller-5bddd4b946-82svd\" (UID: \"ac39e555-61a7-48d2-976e-e3a095bae216\") " pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.384842 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhjkq\" (UniqueName: \"kubernetes.io/projected/9edef12f-0a1f-45ad-8850-0d2edfc5384c-kube-api-access-hhjkq\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.559856 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.651101 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw"] Dec 11 09:36:45 crc kubenswrapper[4788]: W1211 09:36:45.664049 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ed60fe2_ac5e_46b3_a0f1_05436db532bb.slice/crio-22d15967662ab41ebba03aacc907e98476321835dc3dfd7ac59eb63101d4740d WatchSource:0}: Error finding container 22d15967662ab41ebba03aacc907e98476321835dc3dfd7ac59eb63101d4740d: Status 404 returned error can't find the container with id 22d15967662ab41ebba03aacc907e98476321835dc3dfd7ac59eb63101d4740d Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.668392 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.676023 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/20680b9d-91a0-4194-a35b-a25c14869938-metrics-certs\") pod \"frr-k8s-thk86\" (UID: \"20680b9d-91a0-4194-a35b-a25c14869938\") " pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.787796 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-82svd"] Dec 11 09:36:45 crc kubenswrapper[4788]: W1211 09:36:45.794626 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac39e555_61a7_48d2_976e_e3a095bae216.slice/crio-5ca160805db66022a51560a9c6226a9bc4ec9ebd6099bd79893e7b5473afc8d7 WatchSource:0}: Error finding container 5ca160805db66022a51560a9c6226a9bc4ec9ebd6099bd79893e7b5473afc8d7: Status 404 returned error can't find the container with id 5ca160805db66022a51560a9c6226a9bc4ec9ebd6099bd79893e7b5473afc8d7 Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.871521 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.871642 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.871805 4788 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 11 09:36:45 crc kubenswrapper[4788]: E1211 09:36:45.871879 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist podName:9edef12f-0a1f-45ad-8850-0d2edfc5384c nodeName:}" failed. No retries permitted until 2025-12-11 09:36:46.871854972 +0000 UTC m=+936.942634558 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist") pod "speaker-wfdvw" (UID: "9edef12f-0a1f-45ad-8850-0d2edfc5384c") : secret "metallb-memberlist" not found Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.875507 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-metrics-certs\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.877256 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-82svd" event={"ID":"ac39e555-61a7-48d2-976e-e3a095bae216","Type":"ContainerStarted","Data":"5ca160805db66022a51560a9c6226a9bc4ec9ebd6099bd79893e7b5473afc8d7"} Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.878906 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" event={"ID":"1ed60fe2-ac5e-46b3-a0f1-05436db532bb","Type":"ContainerStarted","Data":"22d15967662ab41ebba03aacc907e98476321835dc3dfd7ac59eb63101d4740d"} Dec 11 09:36:45 crc kubenswrapper[4788]: I1211 09:36:45.972743 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.886022 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"92da46425db4e49b4507521414ed90ef3eb4861599936793c7fe5ec8dd7bfd1b"} Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.888325 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-82svd" event={"ID":"ac39e555-61a7-48d2-976e-e3a095bae216","Type":"ContainerStarted","Data":"2d47a0248b0266ff82ed2e87420cd9196be901d95b8b5c7a499ab7e3ecd35bcc"} Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.888364 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-82svd" event={"ID":"ac39e555-61a7-48d2-976e-e3a095bae216","Type":"ContainerStarted","Data":"6b6f5f2ff05e9479925da3e553989bb32be2416c4c25e9f32c19ac85899ee6f6"} Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.888523 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.892011 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.904851 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9edef12f-0a1f-45ad-8850-0d2edfc5384c-memberlist\") pod \"speaker-wfdvw\" (UID: \"9edef12f-0a1f-45ad-8850-0d2edfc5384c\") " pod="metallb-system/speaker-wfdvw" Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.911668 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-82svd" podStartSLOduration=1.9116504010000002 podStartE2EDuration="1.911650401s" podCreationTimestamp="2025-12-11 09:36:45 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:36:46.906361375 +0000 UTC m=+936.977140981" watchObservedRunningTime="2025-12-11 09:36:46.911650401 +0000 UTC m=+936.982429987" Dec 11 09:36:46 crc kubenswrapper[4788]: I1211 09:36:46.971324 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-wfdvw" Dec 11 09:36:46 crc kubenswrapper[4788]: W1211 09:36:46.995124 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9edef12f_0a1f_45ad_8850_0d2edfc5384c.slice/crio-1936a955d8ec0ba8a10b49b020430cd0f6938636211dd3466e33dc0831c967e8 WatchSource:0}: Error finding container 1936a955d8ec0ba8a10b49b020430cd0f6938636211dd3466e33dc0831c967e8: Status 404 returned error can't find the container with id 1936a955d8ec0ba8a10b49b020430cd0f6938636211dd3466e33dc0831c967e8 Dec 11 09:36:47 crc kubenswrapper[4788]: I1211 09:36:47.907943 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wfdvw" event={"ID":"9edef12f-0a1f-45ad-8850-0d2edfc5384c","Type":"ContainerStarted","Data":"4e04ad66dc7dd8a538545dc624b2e090b28270ceab5655765622fa79d1870136"} Dec 11 09:36:47 crc kubenswrapper[4788]: I1211 09:36:47.908406 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wfdvw" event={"ID":"9edef12f-0a1f-45ad-8850-0d2edfc5384c","Type":"ContainerStarted","Data":"1901fcb0dd3e5cfc536624850740b68e41352da55bbcb1005f29c46e24e239c9"} Dec 11 09:36:47 crc kubenswrapper[4788]: I1211 09:36:47.908419 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wfdvw" event={"ID":"9edef12f-0a1f-45ad-8850-0d2edfc5384c","Type":"ContainerStarted","Data":"1936a955d8ec0ba8a10b49b020430cd0f6938636211dd3466e33dc0831c967e8"} Dec 11 09:36:47 crc kubenswrapper[4788]: I1211 09:36:47.908837 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-wfdvw" Dec 11 09:36:47 crc kubenswrapper[4788]: I1211 09:36:47.958251 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-wfdvw" podStartSLOduration=2.958201712 podStartE2EDuration="2.958201712s" podCreationTimestamp="2025-12-11 09:36:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:36:47.953822679 +0000 UTC m=+938.024602265" watchObservedRunningTime="2025-12-11 09:36:47.958201712 +0000 UTC m=+938.028981298" Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.369870 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.370563 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.370606 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.372455 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.372516 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43" gracePeriod=600 Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.938742 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43" exitCode=0 Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.938802 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43"} Dec 11 09:36:51 crc kubenswrapper[4788]: I1211 09:36:51.938842 4788 scope.go:117] "RemoveContainer" containerID="00dba22c423f7be20758dd52c8556ca67b0617820fa8e27466826983b382a0ca" Dec 11 09:36:53 crc kubenswrapper[4788]: I1211 09:36:53.953592 4788 generic.go:334] "Generic (PLEG): container finished" podID="20680b9d-91a0-4194-a35b-a25c14869938" containerID="12c009c411d320f15453de07c2a57980258a5346096876c793b6258f847fb688" exitCode=0 Dec 11 09:36:53 crc kubenswrapper[4788]: I1211 09:36:53.954433 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerDied","Data":"12c009c411d320f15453de07c2a57980258a5346096876c793b6258f847fb688"} Dec 11 09:36:53 crc kubenswrapper[4788]: I1211 09:36:53.994496 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d"} Dec 11 09:36:54 crc kubenswrapper[4788]: I1211 09:36:54.002788 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" event={"ID":"1ed60fe2-ac5e-46b3-a0f1-05436db532bb","Type":"ContainerStarted","Data":"90881a5587a8ec96f3badd6c21984aeb25c0af0cdfbb3847b4d2c2a32bef7eb0"} Dec 11 09:36:54 crc kubenswrapper[4788]: I1211 09:36:54.003565 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:36:54 crc kubenswrapper[4788]: I1211 09:36:54.049137 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" podStartSLOduration=1.176930421 podStartE2EDuration="9.049111472s" podCreationTimestamp="2025-12-11 09:36:45 +0000 UTC" firstStartedPulling="2025-12-11 09:36:45.669645146 +0000 UTC m=+935.740424732" lastFinishedPulling="2025-12-11 09:36:53.541826197 +0000 UTC 
m=+943.612605783" observedRunningTime="2025-12-11 09:36:54.049113292 +0000 UTC m=+944.119892888" watchObservedRunningTime="2025-12-11 09:36:54.049111472 +0000 UTC m=+944.119891058" Dec 11 09:36:55 crc kubenswrapper[4788]: I1211 09:36:55.011621 4788 generic.go:334] "Generic (PLEG): container finished" podID="20680b9d-91a0-4194-a35b-a25c14869938" containerID="b5c620577d57c0f1374a92ea6a0b36789bf0a675f377138c70a0fd2028c647dd" exitCode=0 Dec 11 09:36:55 crc kubenswrapper[4788]: I1211 09:36:55.011713 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerDied","Data":"b5c620577d57c0f1374a92ea6a0b36789bf0a675f377138c70a0fd2028c647dd"} Dec 11 09:36:55 crc kubenswrapper[4788]: I1211 09:36:55.564323 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-82svd" Dec 11 09:36:56 crc kubenswrapper[4788]: I1211 09:36:56.020656 4788 generic.go:334] "Generic (PLEG): container finished" podID="20680b9d-91a0-4194-a35b-a25c14869938" containerID="40bebc873b28993f9d37dcc00eaabd1294e0ae9635cff037afc9cfeff40d9b54" exitCode=0 Dec 11 09:36:56 crc kubenswrapper[4788]: I1211 09:36:56.020716 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerDied","Data":"40bebc873b28993f9d37dcc00eaabd1294e0ae9635cff037afc9cfeff40d9b54"} Dec 11 09:36:57 crc kubenswrapper[4788]: I1211 09:36:57.032052 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"5ee492a9f9bc141a55078a46c86184dd051d9d2d4112cfb88c93e184d62cce57"} Dec 11 09:36:57 crc kubenswrapper[4788]: I1211 09:36:57.033464 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"08f57add92d34f4ddfe3f81fe5013235dd5b0669a18034bdc849e05f419eeb2c"} Dec 11 09:36:57 crc kubenswrapper[4788]: I1211 09:36:57.033479 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"b579fd75290e5a7744b6f6c7d268b6f6d2248e0050d7cf63542c4c771bf03a7b"} Dec 11 09:36:57 crc kubenswrapper[4788]: I1211 09:36:57.033492 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"787e76b0c30a5324efac88e79dfc7a3d1b611ab7a062bdebf5d74bc527cde6e0"} Dec 11 09:36:57 crc kubenswrapper[4788]: I1211 09:36:57.033502 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"c29d91fb56c24c2ff740d4061388f8522c5bc93e5c942108e7b20ffa98671fb3"} Dec 11 09:36:58 crc kubenswrapper[4788]: I1211 09:36:58.047788 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-thk86" event={"ID":"20680b9d-91a0-4194-a35b-a25c14869938","Type":"ContainerStarted","Data":"f89cfa30faf9984e54cd3c2018de798d36d6b4264e3725fe95f47292aa6336b8"} Dec 11 09:36:58 crc kubenswrapper[4788]: I1211 09:36:58.048552 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-thk86" Dec 11 09:36:58 crc kubenswrapper[4788]: I1211 09:36:58.081471 4788 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-thk86" podStartSLOduration=5.647613797 podStartE2EDuration="13.081437974s" podCreationTimestamp="2025-12-11 09:36:45 +0000 UTC" firstStartedPulling="2025-12-11 09:36:46.131775111 +0000 UTC m=+936.202554697" lastFinishedPulling="2025-12-11 09:36:53.565599278 +0000 UTC m=+943.636378874" observedRunningTime="2025-12-11 09:36:58.079591326 +0000 UTC m=+948.150370922" watchObservedRunningTime="2025-12-11 09:36:58.081437974 +0000 UTC m=+948.152217570" Dec 11 09:37:00 crc kubenswrapper[4788]: I1211 09:37:00.973899 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-thk86" Dec 11 09:37:01 crc kubenswrapper[4788]: I1211 09:37:01.044306 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-thk86" Dec 11 09:37:05 crc kubenswrapper[4788]: I1211 09:37:05.387194 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-fc5pw" Dec 11 09:37:06 crc kubenswrapper[4788]: I1211 09:37:06.976123 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-wfdvw" Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.803012 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.804627 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.807120 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-wkx6j" Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.807312 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.807541 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.820376 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:09 crc kubenswrapper[4788]: I1211 09:37:09.924644 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt9wd\" (UniqueName: \"kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd\") pod \"openstack-operator-index-2lvrn\" (UID: \"1d8151b3-a012-435f-8e9f-4b36e0434814\") " pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:10 crc kubenswrapper[4788]: I1211 09:37:10.026279 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt9wd\" (UniqueName: \"kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd\") pod \"openstack-operator-index-2lvrn\" (UID: \"1d8151b3-a012-435f-8e9f-4b36e0434814\") " pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:10 crc kubenswrapper[4788]: I1211 09:37:10.048188 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt9wd\" (UniqueName: \"kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd\") pod \"openstack-operator-index-2lvrn\" (UID: 
\"1d8151b3-a012-435f-8e9f-4b36e0434814\") " pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:10 crc kubenswrapper[4788]: I1211 09:37:10.126408 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:10 crc kubenswrapper[4788]: I1211 09:37:10.368653 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:11 crc kubenswrapper[4788]: I1211 09:37:11.129535 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2lvrn" event={"ID":"1d8151b3-a012-435f-8e9f-4b36e0434814","Type":"ContainerStarted","Data":"a1ae7dd89dd717d23d7d6149fbff4d54f2ecb8129890b304b93ba1ee3026c41b"} Dec 11 09:37:11 crc kubenswrapper[4788]: I1211 09:37:11.971501 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.378807 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-8g2bh"] Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.379586 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.390203 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8g2bh"] Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.462945 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhlw2\" (UniqueName: \"kubernetes.io/projected/78fdcbf0-ff20-43b8-bf9a-ded097de063c-kube-api-access-vhlw2\") pod \"openstack-operator-index-8g2bh\" (UID: \"78fdcbf0-ff20-43b8-bf9a-ded097de063c\") " pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.564732 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhlw2\" (UniqueName: \"kubernetes.io/projected/78fdcbf0-ff20-43b8-bf9a-ded097de063c-kube-api-access-vhlw2\") pod \"openstack-operator-index-8g2bh\" (UID: \"78fdcbf0-ff20-43b8-bf9a-ded097de063c\") " pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.585541 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhlw2\" (UniqueName: \"kubernetes.io/projected/78fdcbf0-ff20-43b8-bf9a-ded097de063c-kube-api-access-vhlw2\") pod \"openstack-operator-index-8g2bh\" (UID: \"78fdcbf0-ff20-43b8-bf9a-ded097de063c\") " pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:12 crc kubenswrapper[4788]: I1211 09:37:12.744535 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.083719 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8g2bh"] Dec 11 09:37:14 crc kubenswrapper[4788]: W1211 09:37:14.085427 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78fdcbf0_ff20_43b8_bf9a_ded097de063c.slice/crio-741ecba7d9a52f17ffe0bf9858bcfe1a943c03b8a69e17527114dd33bdc69336 WatchSource:0}: Error finding container 741ecba7d9a52f17ffe0bf9858bcfe1a943c03b8a69e17527114dd33bdc69336: Status 404 returned error can't find the container with id 741ecba7d9a52f17ffe0bf9858bcfe1a943c03b8a69e17527114dd33bdc69336 Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.157678 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8g2bh" event={"ID":"78fdcbf0-ff20-43b8-bf9a-ded097de063c","Type":"ContainerStarted","Data":"741ecba7d9a52f17ffe0bf9858bcfe1a943c03b8a69e17527114dd33bdc69336"} Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.159143 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2lvrn" event={"ID":"1d8151b3-a012-435f-8e9f-4b36e0434814","Type":"ContainerStarted","Data":"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8"} Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.159433 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2lvrn" podUID="1d8151b3-a012-435f-8e9f-4b36e0434814" containerName="registry-server" containerID="cri-o://963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8" gracePeriod=2 Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.181244 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2lvrn" podStartSLOduration=1.85861155 podStartE2EDuration="5.181224746s" podCreationTimestamp="2025-12-11 09:37:09 +0000 UTC" firstStartedPulling="2025-12-11 09:37:10.380102054 +0000 UTC m=+960.450881640" lastFinishedPulling="2025-12-11 09:37:13.70271525 +0000 UTC m=+963.773494836" observedRunningTime="2025-12-11 09:37:14.17864638 +0000 UTC m=+964.249425986" watchObservedRunningTime="2025-12-11 09:37:14.181224746 +0000 UTC m=+964.252004332" Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.477556 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.599400 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt9wd\" (UniqueName: \"kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd\") pod \"1d8151b3-a012-435f-8e9f-4b36e0434814\" (UID: \"1d8151b3-a012-435f-8e9f-4b36e0434814\") " Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.606214 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd" (OuterVolumeSpecName: "kube-api-access-qt9wd") pod "1d8151b3-a012-435f-8e9f-4b36e0434814" (UID: "1d8151b3-a012-435f-8e9f-4b36e0434814"). InnerVolumeSpecName "kube-api-access-qt9wd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:37:14 crc kubenswrapper[4788]: I1211 09:37:14.700844 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt9wd\" (UniqueName: \"kubernetes.io/projected/1d8151b3-a012-435f-8e9f-4b36e0434814-kube-api-access-qt9wd\") on node \"crc\" DevicePath \"\"" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.168179 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8g2bh" event={"ID":"78fdcbf0-ff20-43b8-bf9a-ded097de063c","Type":"ContainerStarted","Data":"46be5cb3cd5c5d453ced7f462a033ba5af4ebf1053b6b16bf3ba052fcc5d8fc5"} Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.169689 4788 generic.go:334] "Generic (PLEG): container finished" podID="1d8151b3-a012-435f-8e9f-4b36e0434814" containerID="963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8" exitCode=0 Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.169732 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2lvrn" event={"ID":"1d8151b3-a012-435f-8e9f-4b36e0434814","Type":"ContainerDied","Data":"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8"} Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.169760 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2lvrn" event={"ID":"1d8151b3-a012-435f-8e9f-4b36e0434814","Type":"ContainerDied","Data":"a1ae7dd89dd717d23d7d6149fbff4d54f2ecb8129890b304b93ba1ee3026c41b"} Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.169781 4788 scope.go:117] "RemoveContainer" containerID="963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.169875 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2lvrn" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.190003 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-8g2bh" podStartSLOduration=3.105696761 podStartE2EDuration="3.189969437s" podCreationTimestamp="2025-12-11 09:37:12 +0000 UTC" firstStartedPulling="2025-12-11 09:37:14.090135405 +0000 UTC m=+964.160914991" lastFinishedPulling="2025-12-11 09:37:14.174408081 +0000 UTC m=+964.245187667" observedRunningTime="2025-12-11 09:37:15.189060573 +0000 UTC m=+965.259840169" watchObservedRunningTime="2025-12-11 09:37:15.189969437 +0000 UTC m=+965.260749023" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.191689 4788 scope.go:117] "RemoveContainer" containerID="963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8" Dec 11 09:37:15 crc kubenswrapper[4788]: E1211 09:37:15.192407 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8\": container with ID starting with 963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8 not found: ID does not exist" containerID="963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.192457 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8"} err="failed to get container status \"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8\": rpc error: code = NotFound desc = could not find container \"963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8\": container with ID starting with 963948aec5b5b9d3f7aaecef5876fdff7c5cd91b0b8f1df50d1d2ad1dcb26ff8 not found: ID does not exist" Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.216489 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.221674 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2lvrn"] Dec 11 09:37:15 crc kubenswrapper[4788]: I1211 09:37:15.976332 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-thk86" Dec 11 09:37:16 crc kubenswrapper[4788]: I1211 09:37:16.504156 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d8151b3-a012-435f-8e9f-4b36e0434814" path="/var/lib/kubelet/pods/1d8151b3-a012-435f-8e9f-4b36e0434814/volumes" Dec 11 09:37:22 crc kubenswrapper[4788]: I1211 09:37:22.745303 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:22 crc kubenswrapper[4788]: I1211 09:37:22.746007 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:22 crc kubenswrapper[4788]: I1211 09:37:22.774160 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:23 crc kubenswrapper[4788]: I1211 09:37:23.242882 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-8g2bh" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 
09:37:36.029671 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg"] Dec 11 09:37:36 crc kubenswrapper[4788]: E1211 09:37:36.030683 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d8151b3-a012-435f-8e9f-4b36e0434814" containerName="registry-server" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.030699 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d8151b3-a012-435f-8e9f-4b36e0434814" containerName="registry-server" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.030816 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d8151b3-a012-435f-8e9f-4b36e0434814" containerName="registry-server" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.031710 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.039109 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-89nsn" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.041104 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg"] Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.122701 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.122748 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.122778 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpt8c\" (UniqueName: \"kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.224255 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.224299 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util\") pod 
\"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.224353 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpt8c\" (UniqueName: \"kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.225547 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.225583 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.245502 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpt8c\" (UniqueName: \"kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c\") pod \"e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.351080 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:36 crc kubenswrapper[4788]: I1211 09:37:36.799061 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg"] Dec 11 09:37:37 crc kubenswrapper[4788]: I1211 09:37:37.299475 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" event={"ID":"d16150ce-c1df-425d-b361-f6aba1dba525","Type":"ContainerStarted","Data":"95e65fc70b55835c02669eea72cfeca33d282260f8fd0877dff08f41f568c212"} Dec 11 09:37:38 crc kubenswrapper[4788]: I1211 09:37:38.307495 4788 generic.go:334] "Generic (PLEG): container finished" podID="d16150ce-c1df-425d-b361-f6aba1dba525" containerID="120c45401d3f299b68fe0277b6b1561a165d27d02eb65d34dd8f96660ee1df0a" exitCode=0 Dec 11 09:37:38 crc kubenswrapper[4788]: I1211 09:37:38.307583 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" event={"ID":"d16150ce-c1df-425d-b361-f6aba1dba525","Type":"ContainerDied","Data":"120c45401d3f299b68fe0277b6b1561a165d27d02eb65d34dd8f96660ee1df0a"} Dec 11 09:37:39 crc kubenswrapper[4788]: I1211 09:37:39.328215 4788 generic.go:334] "Generic (PLEG): container finished" podID="d16150ce-c1df-425d-b361-f6aba1dba525" containerID="bd2da7b01843f3e2171676d78303f38e55f0767bc40ecaa762c3c345cae111d8" exitCode=0 Dec 11 09:37:39 crc kubenswrapper[4788]: I1211 09:37:39.328329 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" event={"ID":"d16150ce-c1df-425d-b361-f6aba1dba525","Type":"ContainerDied","Data":"bd2da7b01843f3e2171676d78303f38e55f0767bc40ecaa762c3c345cae111d8"} Dec 11 09:37:40 crc kubenswrapper[4788]: I1211 09:37:40.337048 4788 generic.go:334] "Generic (PLEG): container finished" podID="d16150ce-c1df-425d-b361-f6aba1dba525" containerID="14c563fe000b181cd49aba504dd573953ebb64332025cfa66410bce73d1990fd" exitCode=0 Dec 11 09:37:40 crc kubenswrapper[4788]: I1211 09:37:40.337127 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" event={"ID":"d16150ce-c1df-425d-b361-f6aba1dba525","Type":"ContainerDied","Data":"14c563fe000b181cd49aba504dd573953ebb64332025cfa66410bce73d1990fd"} Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.596165 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.701406 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpt8c\" (UniqueName: \"kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c\") pod \"d16150ce-c1df-425d-b361-f6aba1dba525\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.701495 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util\") pod \"d16150ce-c1df-425d-b361-f6aba1dba525\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.701568 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle\") pod \"d16150ce-c1df-425d-b361-f6aba1dba525\" (UID: \"d16150ce-c1df-425d-b361-f6aba1dba525\") " Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.706771 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c" (OuterVolumeSpecName: "kube-api-access-vpt8c") pod "d16150ce-c1df-425d-b361-f6aba1dba525" (UID: "d16150ce-c1df-425d-b361-f6aba1dba525"). InnerVolumeSpecName "kube-api-access-vpt8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.716211 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util" (OuterVolumeSpecName: "util") pod "d16150ce-c1df-425d-b361-f6aba1dba525" (UID: "d16150ce-c1df-425d-b361-f6aba1dba525"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.803348 4788 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-util\") on node \"crc\" DevicePath \"\"" Dec 11 09:37:41 crc kubenswrapper[4788]: I1211 09:37:41.803378 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpt8c\" (UniqueName: \"kubernetes.io/projected/d16150ce-c1df-425d-b361-f6aba1dba525-kube-api-access-vpt8c\") on node \"crc\" DevicePath \"\"" Dec 11 09:37:42 crc kubenswrapper[4788]: I1211 09:37:42.354856 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" event={"ID":"d16150ce-c1df-425d-b361-f6aba1dba525","Type":"ContainerDied","Data":"95e65fc70b55835c02669eea72cfeca33d282260f8fd0877dff08f41f568c212"} Dec 11 09:37:42 crc kubenswrapper[4788]: I1211 09:37:42.354903 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95e65fc70b55835c02669eea72cfeca33d282260f8fd0877dff08f41f568c212" Dec 11 09:37:42 crc kubenswrapper[4788]: I1211 09:37:42.355350 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg" Dec 11 09:37:43 crc kubenswrapper[4788]: I1211 09:37:43.265335 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle" (OuterVolumeSpecName: "bundle") pod "d16150ce-c1df-425d-b361-f6aba1dba525" (UID: "d16150ce-c1df-425d-b361-f6aba1dba525"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:37:43 crc kubenswrapper[4788]: I1211 09:37:43.324802 4788 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d16150ce-c1df-425d-b361-f6aba1dba525-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.589897 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4"] Dec 11 09:37:47 crc kubenswrapper[4788]: E1211 09:37:47.590628 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="pull" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.590645 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="pull" Dec 11 09:37:47 crc kubenswrapper[4788]: E1211 09:37:47.590672 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="util" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.590679 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="util" Dec 11 09:37:47 crc kubenswrapper[4788]: E1211 09:37:47.590689 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="extract" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.590696 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="extract" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.590826 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="d16150ce-c1df-425d-b361-f6aba1dba525" containerName="extract" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.591420 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.593980 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-hr54b" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.615861 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4"] Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.685632 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pc4q\" (UniqueName: \"kubernetes.io/projected/3ad7c2ee-56fc-41ad-9f63-2697aa291948-kube-api-access-7pc4q\") pod \"openstack-operator-controller-operator-7dcd4874fd-59fm4\" (UID: \"3ad7c2ee-56fc-41ad-9f63-2697aa291948\") " pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.787039 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pc4q\" (UniqueName: \"kubernetes.io/projected/3ad7c2ee-56fc-41ad-9f63-2697aa291948-kube-api-access-7pc4q\") pod \"openstack-operator-controller-operator-7dcd4874fd-59fm4\" (UID: \"3ad7c2ee-56fc-41ad-9f63-2697aa291948\") " pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.809072 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pc4q\" (UniqueName: \"kubernetes.io/projected/3ad7c2ee-56fc-41ad-9f63-2697aa291948-kube-api-access-7pc4q\") pod \"openstack-operator-controller-operator-7dcd4874fd-59fm4\" (UID: \"3ad7c2ee-56fc-41ad-9f63-2697aa291948\") " pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:47 crc kubenswrapper[4788]: I1211 09:37:47.916519 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:48 crc kubenswrapper[4788]: I1211 09:37:48.371366 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4"] Dec 11 09:37:48 crc kubenswrapper[4788]: I1211 09:37:48.395289 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" event={"ID":"3ad7c2ee-56fc-41ad-9f63-2697aa291948","Type":"ContainerStarted","Data":"60367036aaf954cf503585523b326d01a65f43e481abc92f178184122e24e562"} Dec 11 09:37:53 crc kubenswrapper[4788]: I1211 09:37:53.426665 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" event={"ID":"3ad7c2ee-56fc-41ad-9f63-2697aa291948","Type":"ContainerStarted","Data":"b8e6c533180ac8c8b16f54986ccb74c146d2403dce1bc0999a6b2c530601c97d"} Dec 11 09:37:53 crc kubenswrapper[4788]: I1211 09:37:53.427449 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:37:53 crc kubenswrapper[4788]: I1211 09:37:53.459739 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" podStartSLOduration=1.6731688980000001 podStartE2EDuration="6.459719349s" podCreationTimestamp="2025-12-11 09:37:47 +0000 UTC" firstStartedPulling="2025-12-11 09:37:48.366512336 +0000 UTC m=+998.437291922" lastFinishedPulling="2025-12-11 09:37:53.153062777 +0000 UTC m=+1003.223842373" observedRunningTime="2025-12-11 09:37:53.455128141 +0000 UTC m=+1003.525907737" watchObservedRunningTime="2025-12-11 09:37:53.459719349 +0000 UTC m=+1003.530498935" Dec 11 09:38:07 crc kubenswrapper[4788]: I1211 09:38:07.920841 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7dcd4874fd-59fm4" Dec 11 09:38:31 crc kubenswrapper[4788]: I1211 09:38:31.975508 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt"] Dec 11 09:38:31 crc kubenswrapper[4788]: I1211 09:38:31.977480 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:38:31 crc kubenswrapper[4788]: I1211 09:38:31.981314 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-k7f7v" Dec 11 09:38:31 crc kubenswrapper[4788]: I1211 09:38:31.996642 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.011481 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.012977 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.015591 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-mnbdn" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.032814 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.038415 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8zmn\" (UniqueName: \"kubernetes.io/projected/03f510d3-616e-454c-9086-687604b0cff1-kube-api-access-z8zmn\") pod \"barbican-operator-controller-manager-7d9dfd778-nnspt\" (UID: \"03f510d3-616e-454c-9086-687604b0cff1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.038470 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbfcc\" (UniqueName: \"kubernetes.io/projected/f2525214-ff81-4638-baa5-afcd178f9ec6-kube-api-access-gbfcc\") pod \"cinder-operator-controller-manager-6c677c69b-qr4bz\" (UID: \"f2525214-ff81-4638-baa5-afcd178f9ec6\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.044307 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-688sb"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.045724 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.049035 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-688sb"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.052002 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-rjf49" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.056293 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.057509 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.060093 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-gr4k4" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.082293 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.083910 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.089741 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-589b8" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.092114 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.109499 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.116408 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.117697 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.125716 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-8hmhb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.131723 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.132944 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.135658 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-bdb5j" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.135907 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.155400 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8zmn\" (UniqueName: \"kubernetes.io/projected/03f510d3-616e-454c-9086-687604b0cff1-kube-api-access-z8zmn\") pod \"barbican-operator-controller-manager-7d9dfd778-nnspt\" (UID: \"03f510d3-616e-454c-9086-687604b0cff1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.155439 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbfcc\" (UniqueName: \"kubernetes.io/projected/f2525214-ff81-4638-baa5-afcd178f9ec6-kube-api-access-gbfcc\") pod \"cinder-operator-controller-manager-6c677c69b-qr4bz\" (UID: \"f2525214-ff81-4638-baa5-afcd178f9ec6\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.162306 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.192808 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.194105 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.198273 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbfcc\" (UniqueName: \"kubernetes.io/projected/f2525214-ff81-4638-baa5-afcd178f9ec6-kube-api-access-gbfcc\") pod \"cinder-operator-controller-manager-6c677c69b-qr4bz\" (UID: \"f2525214-ff81-4638-baa5-afcd178f9ec6\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.202011 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-svc7n" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.202182 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.209705 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.211274 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.211808 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8zmn\" (UniqueName: \"kubernetes.io/projected/03f510d3-616e-454c-9086-687604b0cff1-kube-api-access-z8zmn\") pod \"barbican-operator-controller-manager-7d9dfd778-nnspt\" (UID: \"03f510d3-616e-454c-9086-687604b0cff1\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.216189 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-lvzb2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.226734 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.233983 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.260133 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.261249 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.277621 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-prvjk" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.297260 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdpzx\" (UniqueName: \"kubernetes.io/projected/0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795-kube-api-access-qdpzx\") pod \"heat-operator-controller-manager-5f64f6f8bb-zr4vh\" (UID: \"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.299438 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxfdk\" (UniqueName: \"kubernetes.io/projected/cb780059-66e2-48f4-913b-271489226ef9-kube-api-access-dxfdk\") pod \"designate-operator-controller-manager-697fb699cf-688sb\" (UID: \"cb780059-66e2-48f4-913b-271489226ef9\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.299505 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.319706 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2llmq\" (UniqueName: \"kubernetes.io/projected/348b99db-d5ca-41e2-b2a0-f22f6aeca6b0-kube-api-access-2llmq\") pod \"horizon-operator-controller-manager-68c6d99b8f-p9v6z\" (UID: \"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.319795 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntp55\" (UniqueName: \"kubernetes.io/projected/684c9c50-d818-41d8-852d-82f5937c18ab-kube-api-access-ntp55\") pod \"glance-operator-controller-manager-5697bb5779-kdjld\" (UID: \"684c9c50-d818-41d8-852d-82f5937c18ab\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.319835 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnnm4\" (UniqueName: \"kubernetes.io/projected/2868074d-eb62-4d8a-b275-047d72fec830-kube-api-access-tnnm4\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.322741 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.328621 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.330881 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.396216 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.401019 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.405152 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.407810 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.412911 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-l4rs9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.413143 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-pt822" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.420379 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.421937 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428733 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2llmq\" (UniqueName: \"kubernetes.io/projected/348b99db-d5ca-41e2-b2a0-f22f6aeca6b0-kube-api-access-2llmq\") pod \"horizon-operator-controller-manager-68c6d99b8f-p9v6z\" (UID: \"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428820 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntp55\" (UniqueName: \"kubernetes.io/projected/684c9c50-d818-41d8-852d-82f5937c18ab-kube-api-access-ntp55\") pod \"glance-operator-controller-manager-5697bb5779-kdjld\" (UID: \"684c9c50-d818-41d8-852d-82f5937c18ab\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428850 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnnm4\" (UniqueName: \"kubernetes.io/projected/2868074d-eb62-4d8a-b275-047d72fec830-kube-api-access-tnnm4\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428891 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njsmd\" (UniqueName: \"kubernetes.io/projected/354b87a3-d193-427f-8620-f7fcb52acb67-kube-api-access-njsmd\") pod \"manila-operator-controller-manager-5b5fd79c9c-lcx6f\" (UID: \"354b87a3-d193-427f-8620-f7fcb52acb67\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428940 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdpzx\" (UniqueName: \"kubernetes.io/projected/0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795-kube-api-access-qdpzx\") pod \"heat-operator-controller-manager-5f64f6f8bb-zr4vh\" (UID: \"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428965 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-795p9\" (UniqueName: \"kubernetes.io/projected/7707f72a-2719-46de-8409-b8d397a4ce03-kube-api-access-795p9\") pod \"ironic-operator-controller-manager-967d97867-vzs5b\" (UID: \"7707f72a-2719-46de-8409-b8d397a4ce03\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.428993 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxfdk\" (UniqueName: \"kubernetes.io/projected/cb780059-66e2-48f4-913b-271489226ef9-kube-api-access-dxfdk\") pod \"designate-operator-controller-manager-697fb699cf-688sb\" (UID: \"cb780059-66e2-48f4-913b-271489226ef9\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.429019 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.429056 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdrlz\" (UniqueName: \"kubernetes.io/projected/bf947be7-c3ef-4ae6-beff-11d5ae6d1f94-kube-api-access-qdrlz\") pod \"keystone-operator-controller-manager-7765d96ddf-mpztw\" (UID: \"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.429746 4788 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.429804 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert podName:2868074d-eb62-4d8a-b275-047d72fec830 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:32.929781979 +0000 UTC m=+1043.000561565 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert") pod "infra-operator-controller-manager-78d48bff9d-b7lzd" (UID: "2868074d-eb62-4d8a-b275-047d72fec830") : secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.436269 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-xlt7g" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.457781 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.474363 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2llmq\" (UniqueName: \"kubernetes.io/projected/348b99db-d5ca-41e2-b2a0-f22f6aeca6b0-kube-api-access-2llmq\") pod \"horizon-operator-controller-manager-68c6d99b8f-p9v6z\" (UID: \"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.474513 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntp55\" (UniqueName: \"kubernetes.io/projected/684c9c50-d818-41d8-852d-82f5937c18ab-kube-api-access-ntp55\") pod \"glance-operator-controller-manager-5697bb5779-kdjld\" (UID: \"684c9c50-d818-41d8-852d-82f5937c18ab\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.475105 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxfdk\" (UniqueName: \"kubernetes.io/projected/cb780059-66e2-48f4-913b-271489226ef9-kube-api-access-dxfdk\") pod \"designate-operator-controller-manager-697fb699cf-688sb\" (UID: \"cb780059-66e2-48f4-913b-271489226ef9\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.475722 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdpzx\" (UniqueName: 
\"kubernetes.io/projected/0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795-kube-api-access-qdpzx\") pod \"heat-operator-controller-manager-5f64f6f8bb-zr4vh\" (UID: \"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.483662 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnnm4\" (UniqueName: \"kubernetes.io/projected/2868074d-eb62-4d8a-b275-047d72fec830-kube-api-access-tnnm4\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.496341 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538265 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp7m8\" (UniqueName: \"kubernetes.io/projected/d78d8bad-e298-41b5-82fa-d4cf464d28dd-kube-api-access-lp7m8\") pod \"mariadb-operator-controller-manager-79c8c4686c-dmnk5\" (UID: \"d78d8bad-e298-41b5-82fa-d4cf464d28dd\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538318 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xxp4\" (UniqueName: \"kubernetes.io/projected/f7a4db33-474d-496e-b745-939ce842904d-kube-api-access-8xxp4\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hlfq2\" (UID: \"f7a4db33-474d-496e-b745-939ce842904d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538370 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9czb7\" (UniqueName: \"kubernetes.io/projected/5fa7cb98-c29a-4efb-81ff-710523478ec0-kube-api-access-9czb7\") pod \"nova-operator-controller-manager-697bc559fc-pkxtw\" (UID: \"5fa7cb98-c29a-4efb-81ff-710523478ec0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538406 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njsmd\" (UniqueName: \"kubernetes.io/projected/354b87a3-d193-427f-8620-f7fcb52acb67-kube-api-access-njsmd\") pod \"manila-operator-controller-manager-5b5fd79c9c-lcx6f\" (UID: \"354b87a3-d193-427f-8620-f7fcb52acb67\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538465 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-795p9\" (UniqueName: \"kubernetes.io/projected/7707f72a-2719-46de-8409-b8d397a4ce03-kube-api-access-795p9\") pod \"ironic-operator-controller-manager-967d97867-vzs5b\" (UID: \"7707f72a-2719-46de-8409-b8d397a4ce03\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.538523 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdrlz\" (UniqueName: \"kubernetes.io/projected/bf947be7-c3ef-4ae6-beff-11d5ae6d1f94-kube-api-access-qdrlz\") pod 
\"keystone-operator-controller-manager-7765d96ddf-mpztw\" (UID: \"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.563855 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.570439 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdrlz\" (UniqueName: \"kubernetes.io/projected/bf947be7-c3ef-4ae6-beff-11d5ae6d1f94-kube-api-access-qdrlz\") pod \"keystone-operator-controller-manager-7765d96ddf-mpztw\" (UID: \"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.586604 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njsmd\" (UniqueName: \"kubernetes.io/projected/354b87a3-d193-427f-8620-f7fcb52acb67-kube-api-access-njsmd\") pod \"manila-operator-controller-manager-5b5fd79c9c-lcx6f\" (UID: \"354b87a3-d193-427f-8620-f7fcb52acb67\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.592320 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-795p9\" (UniqueName: \"kubernetes.io/projected/7707f72a-2719-46de-8409-b8d397a4ce03-kube-api-access-795p9\") pod \"ironic-operator-controller-manager-967d97867-vzs5b\" (UID: \"7707f72a-2719-46de-8409-b8d397a4ce03\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.595716 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.606582 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.618613 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-brq4g"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.626586 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.632796 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.637701 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-qrqkr" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.641305 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9czb7\" (UniqueName: \"kubernetes.io/projected/5fa7cb98-c29a-4efb-81ff-710523478ec0-kube-api-access-9czb7\") pod \"nova-operator-controller-manager-697bc559fc-pkxtw\" (UID: \"5fa7cb98-c29a-4efb-81ff-710523478ec0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.641536 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp7m8\" (UniqueName: \"kubernetes.io/projected/d78d8bad-e298-41b5-82fa-d4cf464d28dd-kube-api-access-lp7m8\") pod \"mariadb-operator-controller-manager-79c8c4686c-dmnk5\" (UID: \"d78d8bad-e298-41b5-82fa-d4cf464d28dd\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.641563 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xxp4\" (UniqueName: \"kubernetes.io/projected/f7a4db33-474d-496e-b745-939ce842904d-kube-api-access-8xxp4\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hlfq2\" (UID: \"f7a4db33-474d-496e-b745-939ce842904d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.645700 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.646853 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.650704 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.656087 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-b6lrc" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.656318 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ptc6p" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.672342 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.673447 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.674002 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.676467 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9czb7\" (UniqueName: \"kubernetes.io/projected/5fa7cb98-c29a-4efb-81ff-710523478ec0-kube-api-access-9czb7\") pod \"nova-operator-controller-manager-697bc559fc-pkxtw\" (UID: \"5fa7cb98-c29a-4efb-81ff-710523478ec0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.683977 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.689208 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xxp4\" (UniqueName: \"kubernetes.io/projected/f7a4db33-474d-496e-b745-939ce842904d-kube-api-access-8xxp4\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-hlfq2\" (UID: \"f7a4db33-474d-496e-b745-939ce842904d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.696941 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-brq4g"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.717811 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-gz22l"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.723642 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp7m8\" (UniqueName: \"kubernetes.io/projected/d78d8bad-e298-41b5-82fa-d4cf464d28dd-kube-api-access-lp7m8\") pod \"mariadb-operator-controller-manager-79c8c4686c-dmnk5\" (UID: \"d78d8bad-e298-41b5-82fa-d4cf464d28dd\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.728111 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.732539 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.739938 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-gtwsl" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.756665 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.757368 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7qmd\" (UniqueName: \"kubernetes.io/projected/c0d61f31-e8b5-454d-8961-cedc33a2efa2-kube-api-access-m7qmd\") pod \"ovn-operator-controller-manager-b6456fdb6-zqgjl\" (UID: \"c0d61f31-e8b5-454d-8961-cedc33a2efa2\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.757416 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.757456 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gznv5\" (UniqueName: \"kubernetes.io/projected/6491a596-c46a-45c7-9430-4d9f6a40a6d2-kube-api-access-gznv5\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.757513 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dscbq\" (UniqueName: \"kubernetes.io/projected/556f2b13-91d6-4261-9e7a-bed452e436eb-kube-api-access-dscbq\") pod \"octavia-operator-controller-manager-998648c74-brq4g\" (UID: \"556f2b13-91d6-4261-9e7a-bed452e436eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.782440 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.783823 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.812314 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.814888 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-nvrst" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.855309 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.856706 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.859897 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcfjr\" (UniqueName: \"kubernetes.io/projected/f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d-kube-api-access-rcfjr\") pod \"placement-operator-controller-manager-78f8948974-gz22l\" (UID: \"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.859952 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dscbq\" (UniqueName: \"kubernetes.io/projected/556f2b13-91d6-4261-9e7a-bed452e436eb-kube-api-access-dscbq\") pod \"octavia-operator-controller-manager-998648c74-brq4g\" (UID: \"556f2b13-91d6-4261-9e7a-bed452e436eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.860043 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwpvt\" (UniqueName: \"kubernetes.io/projected/7e94e3ad-b1bf-44e3-aa17-52380cb0e651-kube-api-access-lwpvt\") pod \"swift-operator-controller-manager-9d58d64bc-v87tj\" (UID: \"7e94e3ad-b1bf-44e3-aa17-52380cb0e651\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.860080 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7qmd\" (UniqueName: \"kubernetes.io/projected/c0d61f31-e8b5-454d-8961-cedc33a2efa2-kube-api-access-m7qmd\") pod \"ovn-operator-controller-manager-b6456fdb6-zqgjl\" (UID: \"c0d61f31-e8b5-454d-8961-cedc33a2efa2\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.860113 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.860141 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gznv5\" (UniqueName: \"kubernetes.io/projected/6491a596-c46a-45c7-9430-4d9f6a40a6d2-kube-api-access-gznv5\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.860527 4788 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.860583 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert podName:6491a596-c46a-45c7-9430-4d9f6a40a6d2 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:33.360565863 +0000 UTC m=+1043.431345449 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert") pod "openstack-baremetal-operator-controller-manager-84b575879ff2lw9" (UID: "6491a596-c46a-45c7-9430-4d9f6a40a6d2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.887668 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.888792 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.889420 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.890163 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.892129 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-n99mt" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.910384 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.922499 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dscbq\" (UniqueName: \"kubernetes.io/projected/556f2b13-91d6-4261-9e7a-bed452e436eb-kube-api-access-dscbq\") pod \"octavia-operator-controller-manager-998648c74-brq4g\" (UID: \"556f2b13-91d6-4261-9e7a-bed452e436eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.923166 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gznv5\" (UniqueName: \"kubernetes.io/projected/6491a596-c46a-45c7-9430-4d9f6a40a6d2-kube-api-access-gznv5\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.926860 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7qmd\" (UniqueName: \"kubernetes.io/projected/c0d61f31-e8b5-454d-8961-cedc33a2efa2-kube-api-access-m7qmd\") pod \"ovn-operator-controller-manager-b6456fdb6-zqgjl\" (UID: \"c0d61f31-e8b5-454d-8961-cedc33a2efa2\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.927497 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-gz22l"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.935574 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.937159 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.941759 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-7cfph" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.943509 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22"] Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.976211 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.976958 4788 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: E1211 09:38:32.977193 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert podName:2868074d-eb62-4d8a-b275-047d72fec830 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:33.977023849 +0000 UTC m=+1044.047803435 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert") pod "infra-operator-controller-manager-78d48bff9d-b7lzd" (UID: "2868074d-eb62-4d8a-b275-047d72fec830") : secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.977258 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj8t9\" (UniqueName: \"kubernetes.io/projected/f6e78ec3-ccc1-48b2-8ba6-962de2a25249-kube-api-access-cj8t9\") pod \"telemetry-operator-controller-manager-58d5ff84df-sxb22\" (UID: \"f6e78ec3-ccc1-48b2-8ba6-962de2a25249\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.977384 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcfjr\" (UniqueName: \"kubernetes.io/projected/f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d-kube-api-access-rcfjr\") pod \"placement-operator-controller-manager-78f8948974-gz22l\" (UID: \"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.977653 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwpvt\" (UniqueName: \"kubernetes.io/projected/7e94e3ad-b1bf-44e3-aa17-52380cb0e651-kube-api-access-lwpvt\") pod \"swift-operator-controller-manager-9d58d64bc-v87tj\" (UID: \"7e94e3ad-b1bf-44e3-aa17-52380cb0e651\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:38:32 crc kubenswrapper[4788]: I1211 09:38:32.995027 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.020058 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.039931 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.045126 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwpvt\" (UniqueName: \"kubernetes.io/projected/7e94e3ad-b1bf-44e3-aa17-52380cb0e651-kube-api-access-lwpvt\") pod \"swift-operator-controller-manager-9d58d64bc-v87tj\" (UID: \"7e94e3ad-b1bf-44e3-aa17-52380cb0e651\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.058673 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcfjr\" (UniqueName: \"kubernetes.io/projected/f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d-kube-api-access-rcfjr\") pod \"placement-operator-controller-manager-78f8948974-gz22l\" (UID: \"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.062449 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.067097 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.075509 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vv4rb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.084576 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cs57\" (UniqueName: \"kubernetes.io/projected/726d9db1-d370-4bea-b91d-6beff7ba4b6b-kube-api-access-2cs57\") pod \"test-operator-controller-manager-5854674fcc-z7q4w\" (UID: \"726d9db1-d370-4bea-b91d-6beff7ba4b6b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.088586 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj8t9\" (UniqueName: \"kubernetes.io/projected/f6e78ec3-ccc1-48b2-8ba6-962de2a25249-kube-api-access-cj8t9\") pod \"telemetry-operator-controller-manager-58d5ff84df-sxb22\" (UID: \"f6e78ec3-ccc1-48b2-8ba6-962de2a25249\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.096862 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.119349 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj8t9\" (UniqueName: \"kubernetes.io/projected/f6e78ec3-ccc1-48b2-8ba6-962de2a25249-kube-api-access-cj8t9\") pod \"telemetry-operator-controller-manager-58d5ff84df-sxb22\" (UID: \"f6e78ec3-ccc1-48b2-8ba6-962de2a25249\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.128748 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.152826 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.192055 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29w2t\" (UniqueName: \"kubernetes.io/projected/ce133f4a-b1fd-4e51-8e4f-390d6f125e1d-kube-api-access-29w2t\") pod \"watcher-operator-controller-manager-75944c9b7-kggr7\" (UID: \"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.192394 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cs57\" (UniqueName: \"kubernetes.io/projected/726d9db1-d370-4bea-b91d-6beff7ba4b6b-kube-api-access-2cs57\") pod \"test-operator-controller-manager-5854674fcc-z7q4w\" (UID: \"726d9db1-d370-4bea-b91d-6beff7ba4b6b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.258101 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.259891 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.263119 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-vgqsj" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.263334 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.264339 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.271734 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cs57\" (UniqueName: \"kubernetes.io/projected/726d9db1-d370-4bea-b91d-6beff7ba4b6b-kube-api-access-2cs57\") pod \"test-operator-controller-manager-5854674fcc-z7q4w\" (UID: \"726d9db1-d370-4bea-b91d-6beff7ba4b6b\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.294717 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29w2t\" (UniqueName: \"kubernetes.io/projected/ce133f4a-b1fd-4e51-8e4f-390d6f125e1d-kube-api-access-29w2t\") pod \"watcher-operator-controller-manager-75944c9b7-kggr7\" (UID: \"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.296767 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.325473 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.330842 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29w2t\" (UniqueName: \"kubernetes.io/projected/ce133f4a-b1fd-4e51-8e4f-390d6f125e1d-kube-api-access-29w2t\") pod \"watcher-operator-controller-manager-75944c9b7-kggr7\" (UID: \"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.346867 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.382626 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.383929 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.384088 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.387285 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-g2ckf" Dec 11 09:38:33 crc kubenswrapper[4788]: W1211 09:38:33.396531 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf947be7_c3ef_4ae6_beff_11d5ae6d1f94.slice/crio-f42c12f385e341779cef2ecf417dcdab5466f19e6e7bdcdd3d1b473288168592 WatchSource:0}: Error finding container f42c12f385e341779cef2ecf417dcdab5466f19e6e7bdcdd3d1b473288168592: Status 404 returned error can't find the container with id f42c12f385e341779cef2ecf417dcdab5466f19e6e7bdcdd3d1b473288168592 Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.397483 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdc2s\" (UniqueName: \"kubernetes.io/projected/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-kube-api-access-xdc2s\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.397536 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.397600 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod 
\"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.397622 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.397790 4788 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.397834 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert podName:6491a596-c46a-45c7-9430-4d9f6a40a6d2 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:34.397819627 +0000 UTC m=+1044.468599213 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert") pod "openstack-baremetal-operator-controller-manager-84b575879ff2lw9" (UID: "6491a596-c46a-45c7-9430-4d9f6a40a6d2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.411529 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.414172 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.436173 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.503324 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.504673 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.504850 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzt5x\" (UniqueName: \"kubernetes.io/projected/67082483-5ed3-4141-a7be-d3f95f5b07c4-kube-api-access-bzt5x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gbfnb\" (UID: \"67082483-5ed3-4141-a7be-d3f95f5b07c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.504951 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdc2s\" (UniqueName: 
\"kubernetes.io/projected/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-kube-api-access-xdc2s\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.505010 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.505306 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.505381 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:34.005356974 +0000 UTC m=+1044.076136590 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.505495 4788 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: E1211 09:38:33.505571 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:34.005551088 +0000 UTC m=+1044.076330684 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "metrics-server-cert" not found Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.534492 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdc2s\" (UniqueName: \"kubernetes.io/projected/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-kube-api-access-xdc2s\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.607014 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzt5x\" (UniqueName: \"kubernetes.io/projected/67082483-5ed3-4141-a7be-d3f95f5b07c4-kube-api-access-bzt5x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gbfnb\" (UID: \"67082483-5ed3-4141-a7be-d3f95f5b07c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.635520 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzt5x\" (UniqueName: \"kubernetes.io/projected/67082483-5ed3-4141-a7be-d3f95f5b07c4-kube-api-access-bzt5x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gbfnb\" (UID: \"67082483-5ed3-4141-a7be-d3f95f5b07c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.684618 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.697297 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.701939 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" event={"ID":"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94","Type":"ContainerStarted","Data":"f42c12f385e341779cef2ecf417dcdab5466f19e6e7bdcdd3d1b473288168592"} Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.702705 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" event={"ID":"03f510d3-616e-454c-9086-687604b0cff1","Type":"ContainerStarted","Data":"c205500dda94b2d69e14ee63413f1a165a24c9ad792588053762a8d1d0b8de8d"} Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.703457 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" event={"ID":"f2525214-ff81-4638-baa5-afcd178f9ec6","Type":"ContainerStarted","Data":"20d37713df1635f04e9b6c8af1867d099ad0fbc80f97a24695dd8a2223a5785e"} Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.780345 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.796520 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-688sb"] Dec 11 09:38:33 crc kubenswrapper[4788]: W1211 09:38:33.803206 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb780059_66e2_48f4_913b_271489226ef9.slice/crio-d6bac33d1a083d8b6348533405e5a81adc3272cddc70d308e7d24d436f43947a WatchSource:0}: Error finding container d6bac33d1a083d8b6348533405e5a81adc3272cddc70d308e7d24d436f43947a: Status 404 returned error can't find the container with id d6bac33d1a083d8b6348533405e5a81adc3272cddc70d308e7d24d436f43947a Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.928857 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z"] Dec 11 09:38:33 crc kubenswrapper[4788]: I1211 09:38:33.939272 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.016090 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.016153 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.016199 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016416 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016479 4788 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016573 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:35.016514938 +0000 UTC m=+1045.087294604 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016598 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert podName:2868074d-eb62-4d8a-b275-047d72fec830 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:36.0165881 +0000 UTC m=+1046.087367796 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert") pod "infra-operator-controller-manager-78d48bff9d-b7lzd" (UID: "2868074d-eb62-4d8a-b275-047d72fec830") : secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016438 4788 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.016672 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:35.016663182 +0000 UTC m=+1045.087442878 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "metrics-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.046941 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.060269 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld"] Dec 11 09:38:34 crc kubenswrapper[4788]: W1211 09:38:34.072644 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod684c9c50_d818_41d8_852d_82f5937c18ab.slice/crio-8f9affd9f6328834e7f0cce170bfeba7f226cb59a3bb12575d539cc2e9c67889 WatchSource:0}: Error finding container 8f9affd9f6328834e7f0cce170bfeba7f226cb59a3bb12575d539cc2e9c67889: Status 404 returned error can't find the container with id 8f9affd9f6328834e7f0cce170bfeba7f226cb59a3bb12575d539cc2e9c67889 Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.158085 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.167578 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.181403 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-brq4g"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.189344 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-gz22l"] Dec 11 09:38:34 crc 
kubenswrapper[4788]: I1211 09:38:34.230025 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2"] Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.346789 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2cs57,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-z7q4w_openstack-operators(726d9db1-d370-4bea-b91d-6beff7ba4b6b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.350173 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2cs57,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-z7q4w_openstack-operators(726d9db1-d370-4bea-b91d-6beff7ba4b6b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.352060 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" podUID="726d9db1-d370-4bea-b91d-6beff7ba4b6b" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.353758 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.367202 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj"] Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.379426 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7"] Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.379622 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lwpvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-v87tj_openstack-operators(7e94e3ad-b1bf-44e3-aa17-52380cb0e651): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.383761 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lwpvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-v87tj_openstack-operators(7e94e3ad-b1bf-44e3-aa17-52380cb0e651): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.385611 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" podUID="7e94e3ad-b1bf-44e3-aa17-52380cb0e651" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.391501 4788 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22"] Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.393055 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cj8t9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-sxb22_openstack-operators(f6e78ec3-ccc1-48b2-8ba6-962de2a25249): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.396120 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cj8t9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-sxb22_openstack-operators(f6e78ec3-ccc1-48b2-8ba6-962de2a25249): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.397353 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" podUID="f6e78ec3-ccc1-48b2-8ba6-962de2a25249" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.429622 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.429999 4788 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.430075 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert podName:6491a596-c46a-45c7-9430-4d9f6a40a6d2 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:36.43005071 +0000 UTC m=+1046.500830296 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert") pod "openstack-baremetal-operator-controller-manager-84b575879ff2lw9" (UID: "6491a596-c46a-45c7-9430-4d9f6a40a6d2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.488815 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb"] Dec 11 09:38:34 crc kubenswrapper[4788]: W1211 09:38:34.508516 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67082483_5ed3_4141_a7be_d3f95f5b07c4.slice/crio-82273dbd1b62f0c3097f3b9d8ae8eafbdfe2c8002dc7e75ee87c71f7720ee684 WatchSource:0}: Error finding container 82273dbd1b62f0c3097f3b9d8ae8eafbdfe2c8002dc7e75ee87c71f7720ee684: Status 404 returned error can't find the container with id 82273dbd1b62f0c3097f3b9d8ae8eafbdfe2c8002dc7e75ee87c71f7720ee684 Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.512861 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bzt5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-gbfnb_openstack-operators(67082483-5ed3-4141-a7be-d3f95f5b07c4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.514189 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" 
podUID="67082483-5ed3-4141-a7be-d3f95f5b07c4" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.713954 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" event={"ID":"f6e78ec3-ccc1-48b2-8ba6-962de2a25249","Type":"ContainerStarted","Data":"5aca578a9387bc1babaccb0534a727dd28f17b0823a50576073712caa811b2ef"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.716039 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" event={"ID":"5fa7cb98-c29a-4efb-81ff-710523478ec0","Type":"ContainerStarted","Data":"e9f0eac0072465b0181ab6354e4ac155956cf1af0be800ab10d1557900a37546"} Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.718673 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" podUID="f6e78ec3-ccc1-48b2-8ba6-962de2a25249" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.720859 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" event={"ID":"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d","Type":"ContainerStarted","Data":"b8c31ede1cf5cb093f427a8c76b1b4c1195a150da76bcb77ace1558dab07e8ab"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.735220 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" event={"ID":"684c9c50-d818-41d8-852d-82f5937c18ab","Type":"ContainerStarted","Data":"8f9affd9f6328834e7f0cce170bfeba7f226cb59a3bb12575d539cc2e9c67889"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.740301 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" event={"ID":"cb780059-66e2-48f4-913b-271489226ef9","Type":"ContainerStarted","Data":"d6bac33d1a083d8b6348533405e5a81adc3272cddc70d308e7d24d436f43947a"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.750037 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" event={"ID":"7707f72a-2719-46de-8409-b8d397a4ce03","Type":"ContainerStarted","Data":"ce6a5566da7e16bd955384c955c2912cc0efd92e1862fd6a9f429e06df153569"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.754212 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" event={"ID":"726d9db1-d370-4bea-b91d-6beff7ba4b6b","Type":"ContainerStarted","Data":"4797b7b7dda274906a085811b55722f579223e9ce8f3b148107e5ec7ed64e6c4"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.759022 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" event={"ID":"354b87a3-d193-427f-8620-f7fcb52acb67","Type":"ContainerStarted","Data":"15e0e76397277a3a1da57f391e136cb7a66b9e8977ccaabf913925a49eee8d49"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.761429 4788 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" event={"ID":"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795","Type":"ContainerStarted","Data":"8eb038f622acfae9c70ae1d6a1d6c310c2184218a132ec01d18c92ff45b363bf"} Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.763118 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" podUID="726d9db1-d370-4bea-b91d-6beff7ba4b6b" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.764028 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" event={"ID":"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0","Type":"ContainerStarted","Data":"7bc85da6bff291a6644394fdbc46b102d6eeb2f87a47a369fb9a4c7d932e69f4"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.778066 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" event={"ID":"67082483-5ed3-4141-a7be-d3f95f5b07c4","Type":"ContainerStarted","Data":"82273dbd1b62f0c3097f3b9d8ae8eafbdfe2c8002dc7e75ee87c71f7720ee684"} Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.781994 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" podUID="67082483-5ed3-4141-a7be-d3f95f5b07c4" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.784325 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" event={"ID":"c0d61f31-e8b5-454d-8961-cedc33a2efa2","Type":"ContainerStarted","Data":"e1288738f01a6c966e09b659bc630d5f649cd81ffeb0162b21556fb971e9b5db"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.790369 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" event={"ID":"d78d8bad-e298-41b5-82fa-d4cf464d28dd","Type":"ContainerStarted","Data":"438d0691bbd7f3ee711a9f8205be8d3d0f0130491c98d725754b83b547c6970d"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.796346 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" event={"ID":"7e94e3ad-b1bf-44e3-aa17-52380cb0e651","Type":"ContainerStarted","Data":"4d7a2a526bf33ec8b0872762591aa24c22cc538ed4e40ae16f9cd6375ac20a53"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.801109 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" event={"ID":"f7a4db33-474d-496e-b745-939ce842904d","Type":"ContainerStarted","Data":"88224e271c06b9446d1bd3368e503bd3762a21af3255601fe437f76182513ea7"} Dec 11 09:38:34 crc kubenswrapper[4788]: E1211 09:38:34.805753 4788 
pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" podUID="7e94e3ad-b1bf-44e3-aa17-52380cb0e651" Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.805776 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" event={"ID":"556f2b13-91d6-4261-9e7a-bed452e436eb","Type":"ContainerStarted","Data":"509d6840b7648f77dd1305be2f127fd6da18b77b3c37bc89456863b0bbc8635a"} Dec 11 09:38:34 crc kubenswrapper[4788]: I1211 09:38:34.807458 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" event={"ID":"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d","Type":"ContainerStarted","Data":"59d53a6f24d6dd66a5b94fae1f0091d017c4350eb17e4a92398d45101de77b4a"} Dec 11 09:38:35 crc kubenswrapper[4788]: I1211 09:38:35.043985 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:35 crc kubenswrapper[4788]: I1211 09:38:35.044077 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.044374 4788 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.044427 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:37.044411189 +0000 UTC m=+1047.115190775 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "metrics-server-cert" not found Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.044908 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.045012 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. 
No retries permitted until 2025-12-11 09:38:37.044987854 +0000 UTC m=+1047.115767510 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.822621 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" podUID="67082483-5ed3-4141-a7be-d3f95f5b07c4" Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.824145 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" podUID="f6e78ec3-ccc1-48b2-8ba6-962de2a25249" Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.824358 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" podUID="7e94e3ad-b1bf-44e3-aa17-52380cb0e651" Dec 11 09:38:35 crc kubenswrapper[4788]: E1211 09:38:35.824713 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" podUID="726d9db1-d370-4bea-b91d-6beff7ba4b6b" Dec 11 09:38:36 crc kubenswrapper[4788]: I1211 09:38:36.071641 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:36 crc kubenswrapper[4788]: E1211 09:38:36.071928 4788 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:36 crc kubenswrapper[4788]: E1211 09:38:36.072003 4788 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert podName:2868074d-eb62-4d8a-b275-047d72fec830 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:40.071978833 +0000 UTC m=+1050.142758409 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert") pod "infra-operator-controller-manager-78d48bff9d-b7lzd" (UID: "2868074d-eb62-4d8a-b275-047d72fec830") : secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:36 crc kubenswrapper[4788]: I1211 09:38:36.495503 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:36 crc kubenswrapper[4788]: E1211 09:38:36.495714 4788 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:36 crc kubenswrapper[4788]: E1211 09:38:36.495829 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert podName:6491a596-c46a-45c7-9430-4d9f6a40a6d2 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:40.495810898 +0000 UTC m=+1050.566590484 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert") pod "openstack-baremetal-operator-controller-manager-84b575879ff2lw9" (UID: "6491a596-c46a-45c7-9430-4d9f6a40a6d2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:37 crc kubenswrapper[4788]: I1211 09:38:37.107316 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:37 crc kubenswrapper[4788]: E1211 09:38:37.107624 4788 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 11 09:38:37 crc kubenswrapper[4788]: E1211 09:38:37.107862 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:37 crc kubenswrapper[4788]: I1211 09:38:37.107770 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:37 crc kubenswrapper[4788]: E1211 09:38:37.107916 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:41.10789007 +0000 UTC m=+1051.178669656 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "metrics-server-cert" not found Dec 11 09:38:37 crc kubenswrapper[4788]: E1211 09:38:37.107943 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:41.107934811 +0000 UTC m=+1051.178714507 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:40 crc kubenswrapper[4788]: I1211 09:38:40.162267 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:40 crc kubenswrapper[4788]: E1211 09:38:40.162532 4788 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:40 crc kubenswrapper[4788]: E1211 09:38:40.162692 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert podName:2868074d-eb62-4d8a-b275-047d72fec830 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:48.162654414 +0000 UTC m=+1058.233434000 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert") pod "infra-operator-controller-manager-78d48bff9d-b7lzd" (UID: "2868074d-eb62-4d8a-b275-047d72fec830") : secret "infra-operator-webhook-server-cert" not found Dec 11 09:38:40 crc kubenswrapper[4788]: I1211 09:38:40.569491 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:40 crc kubenswrapper[4788]: E1211 09:38:40.569778 4788 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:40 crc kubenswrapper[4788]: E1211 09:38:40.570105 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert podName:6491a596-c46a-45c7-9430-4d9f6a40a6d2 nodeName:}" failed. No retries permitted until 2025-12-11 09:38:48.570040008 +0000 UTC m=+1058.640819594 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert") pod "openstack-baremetal-operator-controller-manager-84b575879ff2lw9" (UID: "6491a596-c46a-45c7-9430-4d9f6a40a6d2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 11 09:38:41 crc kubenswrapper[4788]: I1211 09:38:41.179840 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:41 crc kubenswrapper[4788]: I1211 09:38:41.180266 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:41 crc kubenswrapper[4788]: E1211 09:38:41.180103 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:41 crc kubenswrapper[4788]: E1211 09:38:41.180423 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:49.180387075 +0000 UTC m=+1059.251166661 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:41 crc kubenswrapper[4788]: E1211 09:38:41.180515 4788 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 11 09:38:41 crc kubenswrapper[4788]: E1211 09:38:41.180608 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:38:49.18058236 +0000 UTC m=+1059.251362146 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "metrics-server-cert" not found Dec 11 09:38:45 crc kubenswrapper[4788]: E1211 09:38:45.083846 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3" Dec 11 09:38:45 crc kubenswrapper[4788]: E1211 09:38:45.084997 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gbfcc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-qr4bz_openstack-operators(f2525214-ff81-4638-baa5-afcd178f9ec6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:38:46 crc kubenswrapper[4788]: E1211 09:38:46.156223 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 11 09:38:46 crc 
kubenswrapper[4788]: E1211 09:38:46.157069 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m7qmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-zqgjl_openstack-operators(c0d61f31-e8b5-454d-8961-cedc33a2efa2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:38:46 crc kubenswrapper[4788]: E1211 09:38:46.753391 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87" Dec 11 09:38:46 crc kubenswrapper[4788]: E1211 09:38:46.753569 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:5bdb3685be3ddc1efd62e16aaf2fa96ead64315e26d52b1b2a7d8ac01baa1e87,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-795p9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-967d97867-vzs5b_openstack-operators(7707f72a-2719-46de-8409-b8d397a4ce03): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:38:47 crc kubenswrapper[4788]: E1211 09:38:47.380541 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a" Dec 11 09:38:47 crc kubenswrapper[4788]: E1211 09:38:47.380776 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-njsmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5b5fd79c9c-lcx6f_openstack-operators(354b87a3-d193-427f-8620-f7fcb52acb67): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.195369 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.202801 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2868074d-eb62-4d8a-b275-047d72fec830-cert\") pod \"infra-operator-controller-manager-78d48bff9d-b7lzd\" (UID: \"2868074d-eb62-4d8a-b275-047d72fec830\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.368975 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.601012 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.607817 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6491a596-c46a-45c7-9430-4d9f6a40a6d2-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879ff2lw9\" (UID: \"6491a596-c46a-45c7-9430-4d9f6a40a6d2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:48 crc kubenswrapper[4788]: I1211 09:38:48.667295 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:38:49 crc kubenswrapper[4788]: I1211 09:38:49.209637 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:49 crc kubenswrapper[4788]: I1211 09:38:49.209749 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:49 crc kubenswrapper[4788]: E1211 09:38:49.209799 4788 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 11 09:38:49 crc kubenswrapper[4788]: E1211 09:38:49.209869 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs podName:f8ee8e1d-0519-4464-9ca1-17d37770dcdc nodeName:}" failed. No retries permitted until 2025-12-11 09:39:05.209853103 +0000 UTC m=+1075.280632689 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs") pod "openstack-operator-controller-manager-65d64bdc49-5hs5g" (UID: "f8ee8e1d-0519-4464-9ca1-17d37770dcdc") : secret "webhook-server-cert" not found Dec 11 09:38:49 crc kubenswrapper[4788]: I1211 09:38:49.216837 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-metrics-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:38:59 crc kubenswrapper[4788]: E1211 09:38:59.769220 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 11 09:38:59 crc kubenswrapper[4788]: E1211 09:38:59.770323 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8xxp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-hlfq2_openstack-operators(f7a4db33-474d-496e-b745-939ce842904d): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:39:00 crc kubenswrapper[4788]: E1211 09:39:00.240804 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 11 09:39:00 crc kubenswrapper[4788]: E1211 09:39:00.241305 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lp7m8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-dmnk5_openstack-operators(d78d8bad-e298-41b5-82fa-d4cf464d28dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:39:00 crc kubenswrapper[4788]: E1211 09:39:00.965446 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 11 09:39:00 crc kubenswrapper[4788]: E1211 09:39:00.965650 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qdrlz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-mpztw_openstack-operators(bf947be7-c3ef-4ae6-beff-11d5ae6d1f94): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:39:01 crc kubenswrapper[4788]: E1211 09:39:01.498809 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 11 09:39:01 crc kubenswrapper[4788]: E1211 09:39:01.499455 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gbfcc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-qr4bz_openstack-operators(f2525214-ff81-4638-baa5-afcd178f9ec6): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 11 09:39:01 crc kubenswrapper[4788]: E1211 09:39:01.501853 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" podUID="f2525214-ff81-4638-baa5-afcd178f9ec6" Dec 11 09:39:02 crc kubenswrapper[4788]: E1211 09:39:02.013211 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 11 09:39:02 crc kubenswrapper[4788]: E1211 09:39:02.013521 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m7qmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-zqgjl_openstack-operators(c0d61f31-e8b5-454d-8961-cedc33a2efa2): ErrImagePull: 
rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 11 09:39:02 crc kubenswrapper[4788]: E1211 09:39:02.014807 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" podUID="c0d61f31-e8b5-454d-8961-cedc33a2efa2" Dec 11 09:39:02 crc kubenswrapper[4788]: I1211 09:39:02.493252 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd"] Dec 11 09:39:02 crc kubenswrapper[4788]: I1211 09:39:02.547202 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9"] Dec 11 09:39:03 crc kubenswrapper[4788]: W1211 09:39:03.616395 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2868074d_eb62_4d8a_b275_047d72fec830.slice/crio-6ccd6770fa059d580aeb2e621bd0710aaea9d589e81dd6a948638f06295dd4fd WatchSource:0}: Error finding container 6ccd6770fa059d580aeb2e621bd0710aaea9d589e81dd6a948638f06295dd4fd: Status 404 returned error can't find the container with id 6ccd6770fa059d580aeb2e621bd0710aaea9d589e81dd6a948638f06295dd4fd Dec 11 09:39:03 crc kubenswrapper[4788]: I1211 09:39:03.776207 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:39:04 crc kubenswrapper[4788]: I1211 09:39:04.051912 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" event={"ID":"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0","Type":"ContainerStarted","Data":"c0cdb4229c34d1e64093bdb81e219d06bb35d8a90c818e6af2ebe487a8ab9955"} Dec 11 09:39:04 crc kubenswrapper[4788]: I1211 09:39:04.053556 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" event={"ID":"2868074d-eb62-4d8a-b275-047d72fec830","Type":"ContainerStarted","Data":"6ccd6770fa059d580aeb2e621bd0710aaea9d589e81dd6a948638f06295dd4fd"} Dec 11 09:39:04 crc kubenswrapper[4788]: I1211 09:39:04.054979 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" event={"ID":"6491a596-c46a-45c7-9430-4d9f6a40a6d2","Type":"ContainerStarted","Data":"b283ebb8723db154040d5c4e204ebeb409cbeab12d8533bbc2328298856bb865"} Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.083988 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" event={"ID":"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d","Type":"ContainerStarted","Data":"8a0f10ebdfcf8b5f7901e957c14aebcb7eb9dd982074ebd45d36210115e81049"} Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.086434 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" event={"ID":"684c9c50-d818-41d8-852d-82f5937c18ab","Type":"ContainerStarted","Data":"1318dbfd3854f1d7cb1b3d708a6dc31fd33b06de66e4a27e2a6526bbed7d3c0e"} Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.095858 4788 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" event={"ID":"cb780059-66e2-48f4-913b-271489226ef9","Type":"ContainerStarted","Data":"d7e429b817c1fd93224c8b633d02fe5f2e43367d530c160b5c0c8e287ba1bbdc"} Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.099751 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" event={"ID":"556f2b13-91d6-4261-9e7a-bed452e436eb","Type":"ContainerStarted","Data":"9dcc3f3dcf8debaac487a2b360001d6cd2a9c533dc46adda5af6643ab74aaac4"} Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.291572 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.308171 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f8ee8e1d-0519-4464-9ca1-17d37770dcdc-webhook-certs\") pod \"openstack-operator-controller-manager-65d64bdc49-5hs5g\" (UID: \"f8ee8e1d-0519-4464-9ca1-17d37770dcdc\") " pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:39:05 crc kubenswrapper[4788]: I1211 09:39:05.554066 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:39:06 crc kubenswrapper[4788]: I1211 09:39:06.110936 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" event={"ID":"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795","Type":"ContainerStarted","Data":"b97921d71e59265d5487f02219dde1da6bfd457c202effef6d9e47d54b967af9"} Dec 11 09:39:06 crc kubenswrapper[4788]: I1211 09:39:06.112677 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" event={"ID":"03f510d3-616e-454c-9086-687604b0cff1","Type":"ContainerStarted","Data":"624dbf703f397d24d7524bacbcb6a5993593a5f4200f6d12a04edabb59ce48e0"} Dec 11 09:39:06 crc kubenswrapper[4788]: I1211 09:39:06.117648 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" event={"ID":"5fa7cb98-c29a-4efb-81ff-710523478ec0","Type":"ContainerStarted","Data":"0a7b4c859efd35017ff1174615aaaad1f303d73396156b442a10f07a6bd2f33d"} Dec 11 09:39:06 crc kubenswrapper[4788]: I1211 09:39:06.118786 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" event={"ID":"7e94e3ad-b1bf-44e3-aa17-52380cb0e651","Type":"ContainerStarted","Data":"b88947b3f5f509f48ad2733e4f90373ca0e7aec1fc17dbe5ae14885a24846d6e"} Dec 11 09:39:06 crc kubenswrapper[4788]: I1211 09:39:06.120318 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" event={"ID":"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d","Type":"ContainerStarted","Data":"8f673eaeeb7fff7d2e506fb02240a6cbf4f892de3aecdf542a5b3e1bc5319760"} Dec 11 09:39:07 crc kubenswrapper[4788]: I1211 09:39:07.134816 4788 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" event={"ID":"726d9db1-d370-4bea-b91d-6beff7ba4b6b","Type":"ContainerStarted","Data":"1550a32090ae064d68c89d4252257a0f2bdeb24b2c22dd63202fba1f1927faf1"} Dec 11 09:39:07 crc kubenswrapper[4788]: E1211 09:39:07.388761 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" podUID="c0d61f31-e8b5-454d-8961-cedc33a2efa2" Dec 11 09:39:07 crc kubenswrapper[4788]: E1211 09:39:07.397189 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" podUID="f2525214-ff81-4638-baa5-afcd178f9ec6" Dec 11 09:39:07 crc kubenswrapper[4788]: I1211 09:39:07.447356 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g"] Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.152636 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" event={"ID":"67082483-5ed3-4141-a7be-d3f95f5b07c4","Type":"ContainerStarted","Data":"9074bbef742f034727008546af3a058b2c240acc5b109f32abd943dd559120a1"} Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.154209 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" event={"ID":"c0d61f31-e8b5-454d-8961-cedc33a2efa2","Type":"ContainerStarted","Data":"fec8d82a24775af71c28c7c3087a41fce5b4f7eae911c201a4bfc97f328a2266"} Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.154979 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:39:08 crc kubenswrapper[4788]: E1211 09:39:08.159800 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" podUID="c0d61f31-e8b5-454d-8961-cedc33a2efa2" Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.167540 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" event={"ID":"f6e78ec3-ccc1-48b2-8ba6-962de2a25249","Type":"ContainerStarted","Data":"8852a17573e3221575c4c3c3ae6af1e451d688e5124d58d4457db51f5e9f770e"} Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.170402 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" event={"ID":"f2525214-ff81-4638-baa5-afcd178f9ec6","Type":"ContainerStarted","Data":"f077129fcb7c15431b3c20f86a8c642b1109a423550f49e43f99029e15cea601"} Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.170602 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:39:08 crc 
kubenswrapper[4788]: E1211 09:39:08.174371 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" podUID="f2525214-ff81-4638-baa5-afcd178f9ec6" Dec 11 09:39:08 crc kubenswrapper[4788]: I1211 09:39:08.178869 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gbfnb" podStartSLOduration=4.750896214 podStartE2EDuration="35.178857414s" podCreationTimestamp="2025-12-11 09:38:33 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.512675827 +0000 UTC m=+1044.583455413" lastFinishedPulling="2025-12-11 09:39:04.940637027 +0000 UTC m=+1075.011416613" observedRunningTime="2025-12-11 09:39:08.177727575 +0000 UTC m=+1078.248507161" watchObservedRunningTime="2025-12-11 09:39:08.178857414 +0000 UTC m=+1078.249637000" Dec 11 09:39:08 crc kubenswrapper[4788]: W1211 09:39:08.561965 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8ee8e1d_0519_4464_9ca1_17d37770dcdc.slice/crio-1b71f9da8417f9bc1a6da2f49936431e5e76bcbd1aa254fdf2eb8ecbf85865ae WatchSource:0}: Error finding container 1b71f9da8417f9bc1a6da2f49936431e5e76bcbd1aa254fdf2eb8ecbf85865ae: Status 404 returned error can't find the container with id 1b71f9da8417f9bc1a6da2f49936431e5e76bcbd1aa254fdf2eb8ecbf85865ae Dec 11 09:39:09 crc kubenswrapper[4788]: I1211 09:39:09.219152 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" event={"ID":"f8ee8e1d-0519-4464-9ca1-17d37770dcdc","Type":"ContainerStarted","Data":"1b71f9da8417f9bc1a6da2f49936431e5e76bcbd1aa254fdf2eb8ecbf85865ae"} Dec 11 09:39:09 crc kubenswrapper[4788]: E1211 09:39:09.222414 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" podUID="f2525214-ff81-4638-baa5-afcd178f9ec6" Dec 11 09:39:09 crc kubenswrapper[4788]: E1211 09:39:09.223089 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" podUID="c0d61f31-e8b5-454d-8961-cedc33a2efa2" Dec 11 09:39:10 crc kubenswrapper[4788]: I1211 09:39:10.247487 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" event={"ID":"2868074d-eb62-4d8a-b275-047d72fec830","Type":"ContainerStarted","Data":"79726ba1b1ba5e4087a26864165f11f42af3d42f537846ed333babbe81269d24"} Dec 11 09:39:10 crc kubenswrapper[4788]: I1211 09:39:10.249537 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" event={"ID":"6491a596-c46a-45c7-9430-4d9f6a40a6d2","Type":"ContainerStarted","Data":"c115b310f7c1e3cbef18e6fe94787e4b2e5cacde694b04cb8462438cf7dc3879"} Dec 11 09:39:10 crc kubenswrapper[4788]: I1211 09:39:10.253169 4788 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" event={"ID":"f8ee8e1d-0519-4464-9ca1-17d37770dcdc","Type":"ContainerStarted","Data":"17b2f2a2bf7b182327e4aecdc5fca872942304b5cd43cf1e1f4944ebc9f3c616"} Dec 11 09:39:10 crc kubenswrapper[4788]: I1211 09:39:10.253446 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:39:10 crc kubenswrapper[4788]: I1211 09:39:10.303815 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" podStartSLOduration=38.303783399 podStartE2EDuration="38.303783399s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:39:10.301664185 +0000 UTC m=+1080.372443761" watchObservedRunningTime="2025-12-11 09:39:10.303783399 +0000 UTC m=+1080.374562985" Dec 11 09:39:10 crc kubenswrapper[4788]: E1211 09:39:10.328805 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" podUID="bf947be7-c3ef-4ae6-beff-11d5ae6d1f94" Dec 11 09:39:10 crc kubenswrapper[4788]: E1211 09:39:10.876243 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" podUID="7707f72a-2719-46de-8409-b8d397a4ce03" Dec 11 09:39:10 crc kubenswrapper[4788]: E1211 09:39:10.879214 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" podUID="f7a4db33-474d-496e-b745-939ce842904d" Dec 11 09:39:10 crc kubenswrapper[4788]: E1211 09:39:10.879486 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" podUID="354b87a3-d193-427f-8620-f7fcb52acb67" Dec 11 09:39:10 crc kubenswrapper[4788]: E1211 09:39:10.879814 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" podUID="d78d8bad-e298-41b5-82fa-d4cf464d28dd" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.264196 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" event={"ID":"cb780059-66e2-48f4-913b-271489226ef9","Type":"ContainerStarted","Data":"8b43210e4ab1e40eba2273895fa18777c61eafda131610bb4d510e61c619ff53"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.265345 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.267168 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" event={"ID":"f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d","Type":"ContainerStarted","Data":"f79824f99e7e01d7136580c6110d7e8968c03f14e7327306d5be054494e1ec5b"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.267730 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.270135 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" event={"ID":"0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795","Type":"ContainerStarted","Data":"96a1a7723b7d104abff73a44b886ab22af760b36c970f62c0bbc8c583d6315f5"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.270657 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.272858 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" event={"ID":"03f510d3-616e-454c-9086-687604b0cff1","Type":"ContainerStarted","Data":"0219e0c175657a8a1e87a47ba5007a9c43de6c829ac679436efa67039611b772"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.273299 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.274295 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.275509 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.275851 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" event={"ID":"726d9db1-d370-4bea-b91d-6beff7ba4b6b","Type":"ContainerStarted","Data":"7fe0c1a3285d9b7db1d4e9647aee449f2c52c5cdc99c8a6c5dcbc27e69e60325"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.276494 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.278054 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" event={"ID":"684c9c50-d818-41d8-852d-82f5937c18ab","Type":"ContainerStarted","Data":"b0aad16dabbc7c7086ffa8a5cc16c2e00fb7a923ad17faf799f42f4a7d982b83"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.278397 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.280393 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" 
event={"ID":"6491a596-c46a-45c7-9430-4d9f6a40a6d2","Type":"ContainerStarted","Data":"d15161e5148dc5b445043dbf7a11256e2b7a4c346270cbf7060aa796c4c7bc9d"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.281080 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.290248 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.298834 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.311662 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" event={"ID":"7e94e3ad-b1bf-44e3-aa17-52380cb0e651","Type":"ContainerStarted","Data":"abcdb6747f03a57a35dbe912aeacfa983dd4e9c2d266987cfe0e45d7ac66a774"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.312694 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.314527 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" event={"ID":"f7a4db33-474d-496e-b745-939ce842904d","Type":"ContainerStarted","Data":"098af7c508fb9d6dcaf7bfae4d62e3947b16ed3813076e2a42a78bb5943ee9c4"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.316344 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" event={"ID":"d78d8bad-e298-41b5-82fa-d4cf464d28dd","Type":"ContainerStarted","Data":"8c10616146b174cbc9b13bc16c1649e413b4458b08fae02ff61e7dd6ad05b4ae"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.319150 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" event={"ID":"354b87a3-d193-427f-8620-f7fcb52acb67","Type":"ContainerStarted","Data":"c709f5f9d36e2c6728b20009fa15d7fbf8e3fbcd71dba21f524439e4af5dafae"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.324263 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" event={"ID":"7707f72a-2719-46de-8409-b8d397a4ce03","Type":"ContainerStarted","Data":"2f2e4767b57c36230a0ec6a4184a8d603a2d5686ef3942d233f3f0210f061b2e"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.327335 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" event={"ID":"556f2b13-91d6-4261-9e7a-bed452e436eb","Type":"ContainerStarted","Data":"90ead1760c980234d754517c1ff6170043afb369ac6563aa1fb46c0de9a5644a"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.328697 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.330480 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" Dec 11 09:39:11 crc 
kubenswrapper[4788]: I1211 09:39:11.332289 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.333217 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.334710 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" event={"ID":"5fa7cb98-c29a-4efb-81ff-710523478ec0","Type":"ContainerStarted","Data":"bf36fa1dec3a52da6bc9ac142215ec138a8cec262746721c47857267d2c7d537"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.335177 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.341190 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" event={"ID":"2868074d-eb62-4d8a-b275-047d72fec830","Type":"ContainerStarted","Data":"c72f2ebdb0e43726c8504f0127d60782710f28af8bafa5198e43a70b6cd6eee4"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.341560 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.343417 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" event={"ID":"348b99db-d5ca-41e2-b2a0-f22f6aeca6b0","Type":"ContainerStarted","Data":"7657dc35ce0e34ad119701013e7b798cc08d40c3d20304216c3f4005f1a7389e"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.344196 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.348593 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.348770 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" event={"ID":"f6e78ec3-ccc1-48b2-8ba6-962de2a25249","Type":"ContainerStarted","Data":"57226dc69b94bcd0191b75a210f1aa9e9e9fda484fdd90f92cfe612acd85f883"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.349205 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.353935 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" event={"ID":"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94","Type":"ContainerStarted","Data":"e7ab7dcd7d432322004812b8c18d318746d464fe482d96c77f761f29898fcda2"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.362519 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" 
event={"ID":"ce133f4a-b1fd-4e51-8e4f-390d6f125e1d","Type":"ContainerStarted","Data":"4ce3ed3b69e5e6677b3c2661b89af73d5d57074d15dccccd0aa0ebdc66a4f38d"} Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.364118 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-688sb" podStartSLOduration=4.446156788 podStartE2EDuration="40.364102413s" podCreationTimestamp="2025-12-11 09:38:31 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.806207126 +0000 UTC m=+1043.876986712" lastFinishedPulling="2025-12-11 09:39:09.724152751 +0000 UTC m=+1079.794932337" observedRunningTime="2025-12-11 09:39:11.362002069 +0000 UTC m=+1081.432781665" watchObservedRunningTime="2025-12-11 09:39:11.364102413 +0000 UTC m=+1081.434881999" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.366528 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.369077 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.394156 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-zr4vh" podStartSLOduration=3.240718702 podStartE2EDuration="39.394139923s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.725472927 +0000 UTC m=+1043.796252513" lastFinishedPulling="2025-12-11 09:39:09.878894148 +0000 UTC m=+1079.949673734" observedRunningTime="2025-12-11 09:39:11.392825739 +0000 UTC m=+1081.463605345" watchObservedRunningTime="2025-12-11 09:39:11.394139923 +0000 UTC m=+1081.464919509" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.477378 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" podStartSLOduration=3.9196836680000002 podStartE2EDuration="39.477360866s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.392817135 +0000 UTC m=+1044.463596721" lastFinishedPulling="2025-12-11 09:39:09.950494333 +0000 UTC m=+1080.021273919" observedRunningTime="2025-12-11 09:39:11.435500873 +0000 UTC m=+1081.506280459" watchObservedRunningTime="2025-12-11 09:39:11.477360866 +0000 UTC m=+1081.548140452" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.549074 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-v87tj" podStartSLOduration=4.063898235 podStartE2EDuration="39.549056934s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.379428122 +0000 UTC m=+1044.450207708" lastFinishedPulling="2025-12-11 09:39:09.864586821 +0000 UTC m=+1079.935366407" observedRunningTime="2025-12-11 09:39:11.545577195 +0000 UTC m=+1081.616356791" watchObservedRunningTime="2025-12-11 09:39:11.549056934 +0000 UTC m=+1081.619836520" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.619763 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-brq4g" podStartSLOduration=4.073579034 podStartE2EDuration="39.619744617s" podCreationTimestamp="2025-12-11 09:38:32 
+0000 UTC" firstStartedPulling="2025-12-11 09:38:34.177106955 +0000 UTC m=+1044.247886541" lastFinishedPulling="2025-12-11 09:39:09.723272528 +0000 UTC m=+1079.794052124" observedRunningTime="2025-12-11 09:39:11.618390042 +0000 UTC m=+1081.689169628" watchObservedRunningTime="2025-12-11 09:39:11.619744617 +0000 UTC m=+1081.690524203" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.681869 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-z7q4w" podStartSLOduration=4.132485443 podStartE2EDuration="39.681835218s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.346528038 +0000 UTC m=+1044.417307634" lastFinishedPulling="2025-12-11 09:39:09.895877833 +0000 UTC m=+1079.966657409" observedRunningTime="2025-12-11 09:39:11.676926523 +0000 UTC m=+1081.747706109" watchObservedRunningTime="2025-12-11 09:39:11.681835218 +0000 UTC m=+1081.752614814" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.723313 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-nnspt" podStartSLOduration=4.210730023 podStartE2EDuration="40.723284511s" podCreationTimestamp="2025-12-11 09:38:31 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.17364993 +0000 UTC m=+1043.244429516" lastFinishedPulling="2025-12-11 09:39:09.686204418 +0000 UTC m=+1079.756984004" observedRunningTime="2025-12-11 09:39:11.711816407 +0000 UTC m=+1081.782595993" watchObservedRunningTime="2025-12-11 09:39:11.723284511 +0000 UTC m=+1081.794064097" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.744826 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-kdjld" podStartSLOduration=4.098940915 podStartE2EDuration="39.744796663s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.077736158 +0000 UTC m=+1044.148515744" lastFinishedPulling="2025-12-11 09:39:09.723591906 +0000 UTC m=+1079.794371492" observedRunningTime="2025-12-11 09:39:11.743561261 +0000 UTC m=+1081.814340847" watchObservedRunningTime="2025-12-11 09:39:11.744796663 +0000 UTC m=+1081.815576249" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.782494 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-p9v6z" podStartSLOduration=4.00398736 podStartE2EDuration="39.782473128s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.9448007 +0000 UTC m=+1044.015580286" lastFinishedPulling="2025-12-11 09:39:09.723286468 +0000 UTC m=+1079.794066054" observedRunningTime="2025-12-11 09:39:11.777916702 +0000 UTC m=+1081.848696308" watchObservedRunningTime="2025-12-11 09:39:11.782473128 +0000 UTC m=+1081.853252714" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.917111 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-gz22l" podStartSLOduration=4.322482875 podStartE2EDuration="39.91709387s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.169208713 +0000 UTC m=+1044.239988299" lastFinishedPulling="2025-12-11 09:39:09.763819708 +0000 UTC m=+1079.834599294" observedRunningTime="2025-12-11 09:39:11.910714556 +0000 UTC m=+1081.981494142" 
watchObservedRunningTime="2025-12-11 09:39:11.91709387 +0000 UTC m=+1081.987873456" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.952704 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" podStartSLOduration=33.976560684 podStartE2EDuration="39.952670702s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:39:03.624039424 +0000 UTC m=+1073.694819010" lastFinishedPulling="2025-12-11 09:39:09.600149442 +0000 UTC m=+1079.670929028" observedRunningTime="2025-12-11 09:39:11.9440178 +0000 UTC m=+1082.014797396" watchObservedRunningTime="2025-12-11 09:39:11.952670702 +0000 UTC m=+1082.023450288" Dec 11 09:39:11 crc kubenswrapper[4788]: I1211 09:39:11.995163 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pkxtw" podStartSLOduration=4.091582545 podStartE2EDuration="39.99513088s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.072615276 +0000 UTC m=+1044.143394862" lastFinishedPulling="2025-12-11 09:39:09.976163611 +0000 UTC m=+1080.046943197" observedRunningTime="2025-12-11 09:39:11.988350807 +0000 UTC m=+1082.059130393" watchObservedRunningTime="2025-12-11 09:39:11.99513088 +0000 UTC m=+1082.065910466" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.047561 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" podStartSLOduration=34.046339213 podStartE2EDuration="40.047536714s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:39:03.625111511 +0000 UTC m=+1073.695891097" lastFinishedPulling="2025-12-11 09:39:09.626309012 +0000 UTC m=+1079.697088598" observedRunningTime="2025-12-11 09:39:12.038830421 +0000 UTC m=+1082.109610007" watchObservedRunningTime="2025-12-11 09:39:12.047536714 +0000 UTC m=+1082.118316300" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.209388 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" podStartSLOduration=4.633330095 podStartE2EDuration="40.209359113s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.375871701 +0000 UTC m=+1044.446651287" lastFinishedPulling="2025-12-11 09:39:09.951900729 +0000 UTC m=+1080.022680305" observedRunningTime="2025-12-11 09:39:12.205597946 +0000 UTC m=+1082.276377532" watchObservedRunningTime="2025-12-11 09:39:12.209359113 +0000 UTC m=+1082.280138699" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.335544 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.386366 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" event={"ID":"f7a4db33-474d-496e-b745-939ce842904d","Type":"ContainerStarted","Data":"ee4e35428e989ca87143deabee3e405a9a3aadea9311c15f5687f02af64fa107"} Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.387883 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 
09:39:12.394051 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" event={"ID":"d78d8bad-e298-41b5-82fa-d4cf464d28dd","Type":"ContainerStarted","Data":"db909148be1b68c9c121c495a005d9698f365d07507202e9347388513f73bf2b"} Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.395043 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.404991 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" event={"ID":"354b87a3-d193-427f-8620-f7fcb52acb67","Type":"ContainerStarted","Data":"e43b7d81943428a51466ded1d4bfa2a496a24bfe3998118217d2847583e48ea6"} Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.405044 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.410867 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.881309 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" podStartSLOduration=3.244061218 podStartE2EDuration="40.881286359s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.230208126 +0000 UTC m=+1044.300987712" lastFinishedPulling="2025-12-11 09:39:11.867433267 +0000 UTC m=+1081.938212853" observedRunningTime="2025-12-11 09:39:12.825044907 +0000 UTC m=+1082.895824513" watchObservedRunningTime="2025-12-11 09:39:12.881286359 +0000 UTC m=+1082.952065945" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.882950 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" podStartSLOduration=2.807572598 podStartE2EDuration="40.882940711s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.945117758 +0000 UTC m=+1044.015897344" lastFinishedPulling="2025-12-11 09:39:12.020485871 +0000 UTC m=+1082.091265457" observedRunningTime="2025-12-11 09:39:12.881445373 +0000 UTC m=+1082.952224959" watchObservedRunningTime="2025-12-11 09:39:12.882940711 +0000 UTC m=+1082.953720297" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.893567 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-sxb22" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.893608 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-kggr7" Dec 11 09:39:12 crc kubenswrapper[4788]: I1211 09:39:12.904543 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" podStartSLOduration=2.779078598 podStartE2EDuration="40.904515844s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.741316023 +0000 UTC m=+1043.812095609" lastFinishedPulling="2025-12-11 09:39:11.866753259 +0000 UTC m=+1081.937532855" 
observedRunningTime="2025-12-11 09:39:12.899140856 +0000 UTC m=+1082.969920442" watchObservedRunningTime="2025-12-11 09:39:12.904515844 +0000 UTC m=+1082.975295430" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.040959 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.412914 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" event={"ID":"7707f72a-2719-46de-8409-b8d397a4ce03","Type":"ContainerStarted","Data":"3f8ad6fd1e2510e35897177c673ac0bd36f0aeff4fb4fd9db0806d90b869e776"} Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.413072 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.415533 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" event={"ID":"f2525214-ff81-4638-baa5-afcd178f9ec6","Type":"ContainerStarted","Data":"c81ea566bac562960a9b09f5c11bdc85ae49273a99dcfcecf503a063e3d97cee"} Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.417011 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" event={"ID":"c0d61f31-e8b5-454d-8961-cedc33a2efa2","Type":"ContainerStarted","Data":"7c19d913e273602636e5f67bdf188ec36bb2b21866da9b79c9130f2e239946b3"} Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.419732 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" event={"ID":"bf947be7-c3ef-4ae6-beff-11d5ae6d1f94","Type":"ContainerStarted","Data":"8733d574e12bf6f73f387275e136bc98ad860c66170e4f1cb1774635dd2774f9"} Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.472075 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-zqgjl" podStartSLOduration=10.725771783999999 podStartE2EDuration="41.472033684s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.195167908 +0000 UTC m=+1044.265947494" lastFinishedPulling="2025-12-11 09:39:04.941429808 +0000 UTC m=+1075.012209394" observedRunningTime="2025-12-11 09:39:13.463759751 +0000 UTC m=+1083.534539337" watchObservedRunningTime="2025-12-11 09:39:13.472033684 +0000 UTC m=+1083.542813270" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.473773 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" podStartSLOduration=2.839686612 podStartE2EDuration="41.473756498s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:34.195434165 +0000 UTC m=+1044.266213751" lastFinishedPulling="2025-12-11 09:39:12.829504051 +0000 UTC m=+1082.900283637" observedRunningTime="2025-12-11 09:39:13.438665988 +0000 UTC m=+1083.509445574" watchObservedRunningTime="2025-12-11 09:39:13.473756498 +0000 UTC m=+1083.544536084" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.487647 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-qr4bz" podStartSLOduration=10.691648312 
podStartE2EDuration="42.487626083s" podCreationTimestamp="2025-12-11 09:38:31 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.313892755 +0000 UTC m=+1043.384672331" lastFinishedPulling="2025-12-11 09:39:05.109870516 +0000 UTC m=+1075.180650102" observedRunningTime="2025-12-11 09:39:13.481311171 +0000 UTC m=+1083.552090747" watchObservedRunningTime="2025-12-11 09:39:13.487626083 +0000 UTC m=+1083.558405669" Dec 11 09:39:13 crc kubenswrapper[4788]: I1211 09:39:13.531505 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" podStartSLOduration=2.6537125440000002 podStartE2EDuration="41.531479758s" podCreationTimestamp="2025-12-11 09:38:32 +0000 UTC" firstStartedPulling="2025-12-11 09:38:33.421578236 +0000 UTC m=+1043.492357822" lastFinishedPulling="2025-12-11 09:39:12.29934545 +0000 UTC m=+1082.370125036" observedRunningTime="2025-12-11 09:39:13.527754252 +0000 UTC m=+1083.598533838" watchObservedRunningTime="2025-12-11 09:39:13.531479758 +0000 UTC m=+1083.602259344" Dec 11 09:39:14 crc kubenswrapper[4788]: I1211 09:39:14.429255 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:39:15 crc kubenswrapper[4788]: I1211 09:39:15.565389 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-65d64bdc49-5hs5g" Dec 11 09:39:18 crc kubenswrapper[4788]: I1211 09:39:18.805457 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-b7lzd" Dec 11 09:39:18 crc kubenswrapper[4788]: I1211 09:39:18.805846 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879ff2lw9" Dec 11 09:39:21 crc kubenswrapper[4788]: I1211 09:39:21.369132 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:39:21 crc kubenswrapper[4788]: I1211 09:39:21.369442 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:39:22 crc kubenswrapper[4788]: I1211 09:39:22.653738 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-lcx6f" Dec 11 09:39:22 crc kubenswrapper[4788]: I1211 09:39:22.817329 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-hlfq2" Dec 11 09:39:22 crc kubenswrapper[4788]: I1211 09:39:22.860385 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-dmnk5" Dec 11 09:39:22 crc kubenswrapper[4788]: I1211 09:39:22.894849 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ironic-operator-controller-manager-967d97867-vzs5b" Dec 11 09:39:23 crc kubenswrapper[4788]: I1211 09:39:23.675459 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" podUID="bf947be7-c3ef-4ae6-beff-11d5ae6d1f94" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 11 09:39:32 crc kubenswrapper[4788]: I1211 09:39:32.634732 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-mpztw" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.437961 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.440407 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.447884 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.449662 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.449764 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.449948 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.449961 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-n69bd" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.494317 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.496276 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.499168 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.504302 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.583061 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjt99\" (UniqueName: \"kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.583143 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.583201 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.583279 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.583309 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwzzb\" (UniqueName: \"kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.684596 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.684638 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwzzb\" (UniqueName: \"kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.684710 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjt99\" (UniqueName: \"kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " 
pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.684765 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.684825 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.685622 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.685627 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.685736 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.707011 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjt99\" (UniqueName: \"kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99\") pod \"dnsmasq-dns-78dd6ddcc-fkn27\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.707028 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwzzb\" (UniqueName: \"kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb\") pod \"dnsmasq-dns-675f4bcbfc-p2kk8\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.760220 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:49.813091 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:51.369874 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:51.369938 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.658148 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.695187 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.696668 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.711070 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.728865 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79tj6\" (UniqueName: \"kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.729280 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.729410 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.832051 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79tj6\" (UniqueName: \"kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.832120 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.832171 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.833129 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.833195 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.853620 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79tj6\" (UniqueName: \"kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6\") pod \"dnsmasq-dns-5ccc8479f9-zs5cl\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:52.976253 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.001844 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.003127 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.013259 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.013577 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.137958 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.138032 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgkxm\" (UniqueName: \"kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.138085 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.239892 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.239956 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgkxm\" (UniqueName: \"kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.239984 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.240877 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.241048 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.262193 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgkxm\" (UniqueName: \"kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm\") pod \"dnsmasq-dns-57d769cc4f-8bf9n\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:53.322662 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.393203 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.401650 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.404471 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.404564 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.404983 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-6gkpk" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.410301 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.415122 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.501431 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-combined-ca-bundle\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.501495 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-memcached-tls-certs\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.501601 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hlmr\" (UniqueName: \"kubernetes.io/projected/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kube-api-access-9hlmr\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.501746 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-config-data\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.501775 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kolla-config\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.603287 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-combined-ca-bundle\") pod \"memcached-0\" (UID: 
\"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.603329 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-memcached-tls-certs\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.603364 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hlmr\" (UniqueName: \"kubernetes.io/projected/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kube-api-access-9hlmr\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.603440 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-config-data\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:57.603458 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kolla-config\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.082074 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.084783 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.088840 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.089857 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-jgtd2" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.089923 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.090969 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.091544 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.093820 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.095176 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.095193 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.095280 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.095336 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.095446 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.097404 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.099860 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.100311 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-g5dfb" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.100706 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nhvl8" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.100731 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.100805 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.102948 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.104819 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.105248 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.105252 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.108588 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.109104 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dbpfl" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.111854 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.114674 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.114856 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.114857 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.114913 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.115073 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.124327 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.147062 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.163078 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213081 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213163 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213204 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qzhq\" (UniqueName: \"kubernetes.io/projected/f98e1b46-07d0-44d9-810c-4a778d44837d-kube-api-access-9qzhq\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213283 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213312 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213350 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213374 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213413 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213593 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213673 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213730 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213785 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213812 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213879 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213907 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.213948 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214016 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214103 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214285 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214339 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214375 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc 
kubenswrapper[4788]: I1211 09:39:58.214412 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sq5f\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214442 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214464 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214498 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214535 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214599 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214664 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214705 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214738 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214775 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214812 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm9g5\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214837 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214863 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214889 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214910 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mt7w\" (UniqueName: \"kubernetes.io/projected/2552ec58-e76a-4c17-ab79-ac237c6d972c-kube-api-access-5mt7w\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.214947 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.215169 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317059 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317119 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317142 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317175 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317202 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317261 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317280 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317299 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317316 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sq5f\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317333 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317351 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317374 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317394 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317414 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317437 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317458 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317480 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317501 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317520 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm9g5\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317540 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 
09:39:58.317561 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317578 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317597 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mt7w\" (UniqueName: \"kubernetes.io/projected/2552ec58-e76a-4c17-ab79-ac237c6d972c-kube-api-access-5mt7w\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317620 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317654 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317691 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317711 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317745 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qzhq\" (UniqueName: \"kubernetes.io/projected/f98e1b46-07d0-44d9-810c-4a778d44837d-kube-api-access-9qzhq\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317775 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317796 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317821 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317839 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317862 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317882 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317907 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317933 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317959 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.317975 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.322479 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc 
kubenswrapper[4788]: I1211 09:39:58.322744 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.322994 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.323975 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.535378 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-config-data\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.535433 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-memcached-tls-certs\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.535504 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kolla-config\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.535692 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-combined-ca-bundle\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.535921 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hlmr\" (UniqueName: \"kubernetes.io/projected/eba9b8bc-1fe1-4ba9-9521-a21c25bed6be-kube-api-access-9hlmr\") pod \"memcached-0\" (UID: \"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be\") " pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.536160 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.538911 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: 
\"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.538942 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.539096 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.539281 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f98e1b46-07d0-44d9-810c-4a778d44837d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.541693 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.541846 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.542445 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98e1b46-07d0-44d9-810c-4a778d44837d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.543471 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.543831 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.546454 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.548210 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-default\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.549680 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.548401 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2552ec58-e76a-4c17-ab79-ac237c6d972c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.552223 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.553561 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.553620 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.553776 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.554216 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.554488 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm9g5\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.555258 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 
11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.556127 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.556608 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.557396 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qzhq\" (UniqueName: \"kubernetes.io/projected/f98e1b46-07d0-44d9-810c-4a778d44837d-kube-api-access-9qzhq\") pod \"openstack-cell1-galera-0\" (UID: \"f98e1b46-07d0-44d9-810c-4a778d44837d\") " pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.558741 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.558848 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sq5f\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.562124 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.565337 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.566892 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.567999 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2552ec58-e76a-4c17-ab79-ac237c6d972c-kolla-config\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.571962 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mt7w\" (UniqueName: 
\"kubernetes.io/projected/2552ec58-e76a-4c17-ab79-ac237c6d972c-kube-api-access-5mt7w\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.595836 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.602014 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2552ec58-e76a-4c17-ab79-ac237c6d972c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.602029 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.615157 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.615918 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.620912 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.621047 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.626326 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"2552ec58-e76a-4c17-ab79-ac237c6d972c\") " pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.734140 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.835394 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.846733 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:58.848073 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.440146 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.441173 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.450137 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-wxqsj" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.539629 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.602417 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hzdg\" (UniqueName: \"kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg\") pod \"kube-state-metrics-0\" (UID: \"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e\") " pod="openstack/kube-state-metrics-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.704432 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hzdg\" (UniqueName: \"kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg\") pod \"kube-state-metrics-0\" (UID: \"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e\") " pod="openstack/kube-state-metrics-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.747295 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hzdg\" (UniqueName: \"kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg\") pod \"kube-state-metrics-0\" (UID: \"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e\") " pod="openstack/kube-state-metrics-0" Dec 11 09:39:59 crc kubenswrapper[4788]: I1211 09:39:59.770449 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.389888 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.417313 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.426468 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.443775 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.471053 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:40:00 crc kubenswrapper[4788]: W1211 09:40:00.471867 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc377af1f_84a0_4b96_8fa2_0f66751cd40d.slice/crio-fd3cbee5cff20b35471fce4d1947910e37802ad956352bd7958e417cf4c7289d WatchSource:0}: Error finding container fd3cbee5cff20b35471fce4d1947910e37802ad956352bd7958e417cf4c7289d: Status 404 returned error can't find the container with id fd3cbee5cff20b35471fce4d1947910e37802ad956352bd7958e417cf4c7289d Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.480582 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.495500 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.516663 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.516704 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 11 09:40:00 crc kubenswrapper[4788]: I1211 09:40:00.601673 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.134673 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e","Type":"ContainerStarted","Data":"a82ca67c80c9e4eb86fb5169b500814bb4167ce70346e321b5ce3606d486261b"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.137219 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f98e1b46-07d0-44d9-810c-4a778d44837d","Type":"ContainerStarted","Data":"64c1bfbeb5c5a2495d016c298a22cf3157bc1fd5e25d9c638bf65c87f4ed8e2c"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.138515 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerStarted","Data":"fd3cbee5cff20b35471fce4d1947910e37802ad956352bd7958e417cf4c7289d"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.140073 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" event={"ID":"ef438d31-b79d-4f26-8676-ae6907d29629","Type":"ContainerStarted","Data":"85815402bc945ad98e94905ce1f83170f7d3e01b008914df383338f8110ab726"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.141534 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerStarted","Data":"a8a1557262d2a19aba2965b49acc4e2ac64997e43396e16737d0fd93869979f1"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.142939 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" event={"ID":"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621","Type":"ContainerStarted","Data":"d89bd5d88eb86cfee0b027312d78adcc3afe3b0e2facddfb3d670bf335709def"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.144565 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2552ec58-e76a-4c17-ab79-ac237c6d972c","Type":"ContainerStarted","Data":"3b5fc367d1bd49c705ce3d93b7af87c53153bb84b4ca1e3ecdcab1760c7bc202"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.146462 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" event={"ID":"2c3c5958-426b-4823-b239-5754a3f12a76","Type":"ContainerStarted","Data":"94a543097e21757da6a81fa1d5f414f51ad2688e2e0bfcb6aa7106c916e81b53"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.149548 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" event={"ID":"ff1bab13-85e5-44f0-99c9-84dedc7f67f0","Type":"ContainerStarted","Data":"60352db15d7ec802df8b3fbfa0a7b5a95c1b2c6238e97f5ebc9c2fbee7cc3db4"} Dec 11 09:40:01 crc kubenswrapper[4788]: I1211 09:40:01.152886 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be","Type":"ContainerStarted","Data":"a1e0b2ea91b01e4a7e8dedcb8448f8a5f6fa35ed625934aa1efc5d1c4d67cde2"} Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.032622 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.034920 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.037451 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.037693 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.037829 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.038126 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.039214 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-87pc6" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.040666 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178706 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-config\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178800 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178826 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178862 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178950 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.178976 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d88ks\" (UniqueName: \"kubernetes.io/projected/428e76c9-65ed-434c-a25d-6bcd956b48d5-kube-api-access-d88ks\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.179173 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.179202 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281102 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281168 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281261 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281329 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281357 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d88ks\" (UniqueName: \"kubernetes.io/projected/428e76c9-65ed-434c-a25d-6bcd956b48d5-kube-api-access-d88ks\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281412 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281433 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.281491 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-config\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 
09:40:03.282360 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.282696 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.283204 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-config\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.285982 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/428e76c9-65ed-434c-a25d-6bcd956b48d5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.290393 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.290522 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.302582 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d88ks\" (UniqueName: \"kubernetes.io/projected/428e76c9-65ed-434c-a25d-6bcd956b48d5-kube-api-access-d88ks\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.305977 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/428e76c9-65ed-434c-a25d-6bcd956b48d5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.316094 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"428e76c9-65ed-434c-a25d-6bcd956b48d5\") " pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.365638 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.550362 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-5hgrc"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.551951 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.556805 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.556878 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-8v9sd" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.558479 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.562928 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-7474g"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.565544 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.586921 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.602891 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7474g"] Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690391 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7xxm\" (UniqueName: \"kubernetes.io/projected/ffcb62ce-c938-498b-9026-8fe40512245a-kube-api-access-n7xxm\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690513 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b899f552-09d4-4919-a3f1-79ff044cd435-scripts\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690582 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffcb62ce-c938-498b-9026-8fe40512245a-scripts\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690631 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690680 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-lib\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: 
I1211 09:40:03.690720 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-etc-ovs\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690774 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-combined-ca-bundle\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690798 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-log\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690856 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690890 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-log-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690957 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-run\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.690999 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-ovn-controller-tls-certs\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.691059 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jsqd\" (UniqueName: \"kubernetes.io/projected/b899f552-09d4-4919-a3f1-79ff044cd435-kube-api-access-6jsqd\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.792643 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7xxm\" (UniqueName: \"kubernetes.io/projected/ffcb62ce-c938-498b-9026-8fe40512245a-kube-api-access-n7xxm\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 
09:40:03.792715 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b899f552-09d4-4919-a3f1-79ff044cd435-scripts\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.795309 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b899f552-09d4-4919-a3f1-79ff044cd435-scripts\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.795481 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffcb62ce-c938-498b-9026-8fe40512245a-scripts\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.796515 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.797697 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ffcb62ce-c938-498b-9026-8fe40512245a-scripts\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.797764 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.797831 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-lib\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798074 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-lib\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798141 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-etc-ovs\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798278 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-combined-ca-bundle\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " 
pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798317 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-log\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798420 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798448 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-log-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.798966 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-run-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.799199 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-log\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.799434 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b899f552-09d4-4919-a3f1-79ff044cd435-var-log-ovn\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.799662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-run\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.799747 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-ovn-controller-tls-certs\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.799863 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jsqd\" (UniqueName: \"kubernetes.io/projected/b899f552-09d4-4919-a3f1-79ff044cd435-kube-api-access-6jsqd\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.800383 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-var-run\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.800715 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ffcb62ce-c938-498b-9026-8fe40512245a-etc-ovs\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.807315 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-combined-ca-bundle\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.810218 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b899f552-09d4-4919-a3f1-79ff044cd435-ovn-controller-tls-certs\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.816145 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7xxm\" (UniqueName: \"kubernetes.io/projected/ffcb62ce-c938-498b-9026-8fe40512245a-kube-api-access-n7xxm\") pod \"ovn-controller-ovs-7474g\" (UID: \"ffcb62ce-c938-498b-9026-8fe40512245a\") " pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.833732 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jsqd\" (UniqueName: \"kubernetes.io/projected/b899f552-09d4-4919-a3f1-79ff044cd435-kube-api-access-6jsqd\") pod \"ovn-controller-5hgrc\" (UID: \"b899f552-09d4-4919-a3f1-79ff044cd435\") " pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.877634 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:03 crc kubenswrapper[4788]: I1211 09:40:03.893629 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:40:04 crc kubenswrapper[4788]: I1211 09:40:04.041609 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 11 09:40:04 crc kubenswrapper[4788]: I1211 09:40:04.180527 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"428e76c9-65ed-434c-a25d-6bcd956b48d5","Type":"ContainerStarted","Data":"e26c99f3b16c766d84fcbde95b74aca0d78331916178bd18d674375f90fa7b97"} Dec 11 09:40:04 crc kubenswrapper[4788]: I1211 09:40:04.430667 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc"] Dec 11 09:40:04 crc kubenswrapper[4788]: W1211 09:40:04.446190 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb899f552_09d4_4919_a3f1_79ff044cd435.slice/crio-c68d5af59def842910da5ccfc434bf5ed6f69e1a866f0754de095087f160b6d7 WatchSource:0}: Error finding container c68d5af59def842910da5ccfc434bf5ed6f69e1a866f0754de095087f160b6d7: Status 404 returned error can't find the container with id c68d5af59def842910da5ccfc434bf5ed6f69e1a866f0754de095087f160b6d7 Dec 11 09:40:04 crc kubenswrapper[4788]: I1211 09:40:04.935444 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7474g"] Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.192658 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc" event={"ID":"b899f552-09d4-4919-a3f1-79ff044cd435","Type":"ContainerStarted","Data":"c68d5af59def842910da5ccfc434bf5ed6f69e1a866f0754de095087f160b6d7"} Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.555786 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-gpspm"] Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.557920 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.561591 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.575452 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-gpspm"] Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.737821 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovs-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.737932 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.737990 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f000044f-b0a7-417e-8278-5deb090b8105-config\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.738007 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovn-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.738039 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-combined-ca-bundle\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.738060 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9d42\" (UniqueName: \"kubernetes.io/projected/f000044f-b0a7-417e-8278-5deb090b8105-kube-api-access-q9d42\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839082 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovs-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839189 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-gpspm\" 
(UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839254 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f000044f-b0a7-417e-8278-5deb090b8105-config\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839275 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovn-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839303 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-combined-ca-bundle\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839321 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9d42\" (UniqueName: \"kubernetes.io/projected/f000044f-b0a7-417e-8278-5deb090b8105-kube-api-access-q9d42\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839718 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovs-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.839898 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f000044f-b0a7-417e-8278-5deb090b8105-ovn-rundir\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.840302 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f000044f-b0a7-417e-8278-5deb090b8105-config\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.864184 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-combined-ca-bundle\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 crc kubenswrapper[4788]: I1211 09:40:05.864265 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9d42\" (UniqueName: \"kubernetes.io/projected/f000044f-b0a7-417e-8278-5deb090b8105-kube-api-access-q9d42\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:05 
crc kubenswrapper[4788]: I1211 09:40:05.887072 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f000044f-b0a7-417e-8278-5deb090b8105-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-gpspm\" (UID: \"f000044f-b0a7-417e-8278-5deb090b8105\") " pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.064699 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.085409 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.088102 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.091681 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.092398 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.185556 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-gpspm" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.249058 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.249303 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz5zf\" (UniqueName: \"kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.249348 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.249544 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.353277 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.353355 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.353380 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz5zf\" (UniqueName: \"kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.353401 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.354216 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.354308 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.354362 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.411460 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz5zf\" (UniqueName: \"kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf\") pod \"dnsmasq-dns-6bc7876d45-f7mj4\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.464464 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.787056 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.791729 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.809130 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-wlksf" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.809206 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.809421 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.809555 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.812482 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871682 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871776 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871856 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871904 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-config\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871954 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.871979 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.872047 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " 
pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.872143 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kxfw\" (UniqueName: \"kubernetes.io/projected/3858a264-6dc8-4a58-8e80-3d57649da896-kube-api-access-9kxfw\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974152 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974343 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-config\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974416 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974443 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974522 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974569 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kxfw\" (UniqueName: \"kubernetes.io/projected/3858a264-6dc8-4a58-8e80-3d57649da896-kube-api-access-9kxfw\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974642 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974658 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.974987 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.976159 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-config\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.979260 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3858a264-6dc8-4a58-8e80-3d57649da896-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.988913 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:06 crc kubenswrapper[4788]: I1211 09:40:06.992838 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:07 crc kubenswrapper[4788]: I1211 09:40:06.996849 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3858a264-6dc8-4a58-8e80-3d57649da896-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:07 crc kubenswrapper[4788]: I1211 09:40:07.001434 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kxfw\" (UniqueName: \"kubernetes.io/projected/3858a264-6dc8-4a58-8e80-3d57649da896-kube-api-access-9kxfw\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:07 crc kubenswrapper[4788]: I1211 09:40:07.004286 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3858a264-6dc8-4a58-8e80-3d57649da896\") " pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:07 crc kubenswrapper[4788]: I1211 09:40:07.126213 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 11 09:40:13 crc kubenswrapper[4788]: W1211 09:40:13.743879 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffcb62ce_c938_498b_9026_8fe40512245a.slice/crio-e8b27037518fe1cc13a4462c04f9360369abf6a9649fdbfb30589a8d24305bc2 WatchSource:0}: Error finding container e8b27037518fe1cc13a4462c04f9360369abf6a9649fdbfb30589a8d24305bc2: Status 404 returned error can't find the container with id e8b27037518fe1cc13a4462c04f9360369abf6a9649fdbfb30589a8d24305bc2 Dec 11 09:40:13 crc kubenswrapper[4788]: I1211 09:40:13.853670 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7474g" event={"ID":"ffcb62ce-c938-498b-9026-8fe40512245a","Type":"ContainerStarted","Data":"e8b27037518fe1cc13a4462c04f9360369abf6a9649fdbfb30589a8d24305bc2"} Dec 11 09:40:21 crc kubenswrapper[4788]: I1211 09:40:21.368967 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:40:21 crc kubenswrapper[4788]: I1211 09:40:21.369694 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:40:21 crc kubenswrapper[4788]: I1211 09:40:21.369778 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:40:21 crc kubenswrapper[4788]: I1211 09:40:21.370553 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:40:21 crc kubenswrapper[4788]: I1211 09:40:21.370616 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d" gracePeriod=600 Dec 11 09:40:23 crc kubenswrapper[4788]: I1211 09:40:23.922738 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d" exitCode=0 Dec 11 09:40:23 crc kubenswrapper[4788]: I1211 09:40:23.922783 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d"} Dec 11 09:40:23 crc kubenswrapper[4788]: I1211 09:40:23.923090 4788 scope.go:117] "RemoveContainer" containerID="5b3f817de5e2b917218a768a7a64c16521218e5cdb66d80136d52d5195848c43" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.128940 4788 log.go:32] "PullImage 
from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-controller/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\": context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.130160 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-neutron-server/blobs/sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b\": context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.130664 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-79tj6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-zs5cl_openstack(35f27cf1-e2d1-4f41-9b9b-20f0da6b2621): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-neutron-server/blobs/sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b\": context canceled" logger="UnhandledError" 
Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.130667 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key --ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599hfdh77h66dh66h667h5d7h5d6h5fh549hc7h5h5dbh6dhdch5c5hcdhdbh686h546hbdh84hf6h657h587hc7h64bh667h5c7h65bh5f8h677q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6jsqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN 
SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-5hgrc_openstack(b899f552-09d4-4919-a3f1-79ff044cd435): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-controller/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\": context canceled" logger="UnhandledError" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.131857 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \\\"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-controller/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\\\": context canceled\"" pod="openstack/ovn-controller-5hgrc" podUID="b899f552-09d4-4919-a3f1-79ff044cd435" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.131863 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b: Get \\\"https://quay.io/v2/podified-antelope-centos9/openstack-neutron-server/blobs/sha256:f1cd92dd4d4577463d8f4ef8228de2f046b0e20a5abe5c22d26bde41fe0be66b\\\": context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.150747 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.151264 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5mt7w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(2552ec58-e76a-4c17-ab79-ac237c6d972c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.152585 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="2552ec58-e76a-4c17-ab79-ac237c6d972c" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.305338 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified\\\"\"" pod="openstack/ovn-controller-5hgrc" podUID="b899f552-09d4-4919-a3f1-79ff044cd435" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.305608 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" Dec 11 09:40:37 crc kubenswrapper[4788]: E1211 09:40:37.305685 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" 
pod="openstack/openstack-galera-0" podUID="2552ec58-e76a-4c17-ab79-ac237c6d972c" Dec 11 09:40:40 crc kubenswrapper[4788]: E1211 09:40:40.307438 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 11 09:40:40 crc kubenswrapper[4788]: E1211 09:40:40.307650 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jm9g5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(c377af1f-84a0-4b96-8fa2-0f66751cd40d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:40 crc kubenswrapper[4788]: E1211 09:40:40.309494 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-cell1-server-0" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" Dec 11 09:40:40 crc kubenswrapper[4788]: E1211 09:40:40.329931 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.076102 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-sb-db-server/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\": context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.076650 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dbhbfhbbh56dhc6hd8h5c8h66fh68fh67ch97hf5h698h64bh4h5cdh59dh9hc4h8ch65dh658h9dhc7h649h5f5h54bh5d9hfch67fh67h5b7q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d88ks,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(428e76c9-65ed-434c-a25d-6bcd956b48d5): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-sb-db-server/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\": context canceled" logger="UnhandledError" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.080161 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.080475 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n5d6h675h7dh655h644h679h556h594h5c9hf6hc9h68ch97h654h595h77h694h664h5c5hdhc8h5bchc9h7h5f7h8h657h678h5dbh57fh86h578q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9hlmr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(eba9b8bc-1fe1-4ba9-9521-a21c25bed6be): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.081782 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="eba9b8bc-1fe1-4ba9-9521-a21c25bed6be" Dec 11 09:40:41 crc kubenswrapper[4788]: E1211 09:40:41.336791 4788 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="eba9b8bc-1fe1-4ba9-9521-a21c25bed6be" Dec 11 09:40:51 crc kubenswrapper[4788]: E1211 09:40:51.537946 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified" Dec 11 09:40:51 crc kubenswrapper[4788]: E1211 09:40:51.538669 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599hfdh77h66dh66h667h5d7h5d6h5fh549hc7h5h5dbh6dhdch5c5hcdhdbh686h546hbdh84hf6h657h587hc7h64bh667h5c7h65bh5f8h677q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n7xxm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-7474g_openstack(ffcb62ce-c938-498b-9026-8fe40512245a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:51 crc kubenswrapper[4788]: E1211 09:40:51.540000 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-7474g" podUID="ffcb62ce-c938-498b-9026-8fe40512245a" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.376531 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.377035 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wgkxm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-8bf9n_openstack(ff1bab13-85e5-44f0-99c9-84dedc7f67f0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.378834 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" podUID="ff1bab13-85e5-44f0-99c9-84dedc7f67f0" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.439238 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-7474g" podUID="ffcb62ce-c938-498b-9026-8fe40512245a" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.757749 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 11 09:40:52 crc 
kubenswrapper[4788]: E1211 09:40:52.758032 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sjt99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-fkn27_openstack(2c3c5958-426b-4823-b239-5754a3f12a76): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.760006 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" podUID="2c3c5958-426b-4823-b239-5754a3f12a76" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.770533 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.770935 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dwzzb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-p2kk8_openstack(ef438d31-b79d-4f26-8676-ae6907d29629): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:40:52 crc kubenswrapper[4788]: E1211 09:40:52.772481 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" podUID="ef438d31-b79d-4f26-8676-ae6907d29629" Dec 11 09:40:53 crc kubenswrapper[4788]: I1211 09:40:53.209977 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 11 09:40:53 crc kubenswrapper[4788]: I1211 09:40:53.221664 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.459380 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" event={"ID":"ff1bab13-85e5-44f0-99c9-84dedc7f67f0","Type":"ContainerDied","Data":"60352db15d7ec802df8b3fbfa0a7b5a95c1b2c6238e97f5ebc9c2fbee7cc3db4"} Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.460317 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60352db15d7ec802df8b3fbfa0a7b5a95c1b2c6238e97f5ebc9c2fbee7cc3db4" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.476435 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.558660 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgkxm\" (UniqueName: \"kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm\") pod \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.559404 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config\") pod \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.560592 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc\") pod \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\" (UID: \"ff1bab13-85e5-44f0-99c9-84dedc7f67f0\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.562007 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config" (OuterVolumeSpecName: "config") pod "ff1bab13-85e5-44f0-99c9-84dedc7f67f0" (UID: "ff1bab13-85e5-44f0-99c9-84dedc7f67f0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.563058 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff1bab13-85e5-44f0-99c9-84dedc7f67f0" (UID: "ff1bab13-85e5-44f0-99c9-84dedc7f67f0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.589504 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm" (OuterVolumeSpecName: "kube-api-access-wgkxm") pod "ff1bab13-85e5-44f0-99c9-84dedc7f67f0" (UID: "ff1bab13-85e5-44f0-99c9-84dedc7f67f0"). InnerVolumeSpecName "kube-api-access-wgkxm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.662932 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.663006 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.663017 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgkxm\" (UniqueName: \"kubernetes.io/projected/ff1bab13-85e5-44f0-99c9-84dedc7f67f0-kube-api-access-wgkxm\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:54 crc kubenswrapper[4788]: W1211 09:40:54.786949 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3858a264_6dc8_4a58_8e80_3d57649da896.slice/crio-6ae3889cd1da651191dbec0b83302b3129bfc3fecf5839418367c18a23f1c856 WatchSource:0}: Error finding container 6ae3889cd1da651191dbec0b83302b3129bfc3fecf5839418367c18a23f1c856: Status 404 returned error can't find the container with id 6ae3889cd1da651191dbec0b83302b3129bfc3fecf5839418367c18a23f1c856 Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.848746 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-gpspm"] Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.870104 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.881810 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.967954 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config\") pod \"ef438d31-b79d-4f26-8676-ae6907d29629\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968078 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc\") pod \"2c3c5958-426b-4823-b239-5754a3f12a76\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968154 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjt99\" (UniqueName: \"kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99\") pod \"2c3c5958-426b-4823-b239-5754a3f12a76\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968220 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwzzb\" (UniqueName: \"kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb\") pod \"ef438d31-b79d-4f26-8676-ae6907d29629\" (UID: \"ef438d31-b79d-4f26-8676-ae6907d29629\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968274 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config\") pod \"2c3c5958-426b-4823-b239-5754a3f12a76\" (UID: \"2c3c5958-426b-4823-b239-5754a3f12a76\") " Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968585 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config" (OuterVolumeSpecName: "config") pod "ef438d31-b79d-4f26-8676-ae6907d29629" (UID: "ef438d31-b79d-4f26-8676-ae6907d29629"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.968798 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef438d31-b79d-4f26-8676-ae6907d29629-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.969559 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c3c5958-426b-4823-b239-5754a3f12a76" (UID: "2c3c5958-426b-4823-b239-5754a3f12a76"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.970516 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config" (OuterVolumeSpecName: "config") pod "2c3c5958-426b-4823-b239-5754a3f12a76" (UID: "2c3c5958-426b-4823-b239-5754a3f12a76"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.972902 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb" (OuterVolumeSpecName: "kube-api-access-dwzzb") pod "ef438d31-b79d-4f26-8676-ae6907d29629" (UID: "ef438d31-b79d-4f26-8676-ae6907d29629"). InnerVolumeSpecName "kube-api-access-dwzzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:40:54 crc kubenswrapper[4788]: I1211 09:40:54.978474 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99" (OuterVolumeSpecName: "kube-api-access-sjt99") pod "2c3c5958-426b-4823-b239-5754a3f12a76" (UID: "2c3c5958-426b-4823-b239-5754a3f12a76"). InnerVolumeSpecName "kube-api-access-sjt99". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.070088 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjt99\" (UniqueName: \"kubernetes.io/projected/2c3c5958-426b-4823-b239-5754a3f12a76-kube-api-access-sjt99\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.070123 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwzzb\" (UniqueName: \"kubernetes.io/projected/ef438d31-b79d-4f26-8676-ae6907d29629-kube-api-access-dwzzb\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.070134 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.070143 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c5958-426b-4823-b239-5754a3f12a76-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:40:55 crc kubenswrapper[4788]: W1211 09:40:55.191542 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf000044f_b0a7_417e_8278_5deb090b8105.slice/crio-4d642aa75aea8c5e59e727627940b3df960b2736f1613feaf4360fb72054b430 WatchSource:0}: Error finding container 4d642aa75aea8c5e59e727627940b3df960b2736f1613feaf4360fb72054b430: Status 404 returned error can't find the container with id 4d642aa75aea8c5e59e727627940b3df960b2736f1613feaf4360fb72054b430 Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.472001 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" event={"ID":"2c3c5958-426b-4823-b239-5754a3f12a76","Type":"ContainerDied","Data":"94a543097e21757da6a81fa1d5f414f51ad2688e2e0bfcb6aa7106c916e81b53"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.472594 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-fkn27" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.477097 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-gpspm" event={"ID":"f000044f-b0a7-417e-8278-5deb090b8105","Type":"ContainerStarted","Data":"4d642aa75aea8c5e59e727627940b3df960b2736f1613feaf4360fb72054b430"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.478258 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.478257 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-p2kk8" event={"ID":"ef438d31-b79d-4f26-8676-ae6907d29629","Type":"ContainerDied","Data":"85815402bc945ad98e94905ce1f83170f7d3e01b008914df383338f8110ab726"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.481875 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3858a264-6dc8-4a58-8e80-3d57649da896","Type":"ContainerStarted","Data":"6ae3889cd1da651191dbec0b83302b3129bfc3fecf5839418367c18a23f1c856"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.483066 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" event={"ID":"c71b12a7-3e8a-418f-9bbb-c95851134efd","Type":"ContainerStarted","Data":"bbf6c1d6565bfcd703f293cfc8a23418e924c06beb0f125c8a009bfa3475d77a"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.489352 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-8bf9n" Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.489435 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9"} Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.595664 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.604490 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-fkn27"] Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.664764 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.683315 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-p2kk8"] Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.701362 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:40:55 crc kubenswrapper[4788]: I1211 09:40:55.707649 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-8bf9n"] Dec 11 09:40:55 crc kubenswrapper[4788]: E1211 09:40:55.826971 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 11 09:40:55 crc kubenswrapper[4788]: E1211 09:40:55.827292 4788 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 11 09:40:55 crc kubenswrapper[4788]: E1211 09:40:55.827466 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods 
--namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8hzdg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(dd95dacb-1402-422f-a9c0-f1e8a4b6d01e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" logger="UnhandledError" Dec 11 09:40:55 crc kubenswrapper[4788]: E1211 09:40:55.828803 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" Dec 11 09:40:56 crc kubenswrapper[4788]: E1211 09:40:56.497450 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" Dec 11 09:40:56 crc kubenswrapper[4788]: I1211 09:40:56.507293 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c3c5958-426b-4823-b239-5754a3f12a76" path="/var/lib/kubelet/pods/2c3c5958-426b-4823-b239-5754a3f12a76/volumes" Dec 11 09:40:56 crc kubenswrapper[4788]: I1211 09:40:56.507907 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef438d31-b79d-4f26-8676-ae6907d29629" path="/var/lib/kubelet/pods/ef438d31-b79d-4f26-8676-ae6907d29629/volumes" Dec 11 09:40:56 crc kubenswrapper[4788]: I1211 09:40:56.508364 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff1bab13-85e5-44f0-99c9-84dedc7f67f0" 
path="/var/lib/kubelet/pods/ff1bab13-85e5-44f0-99c9-84dedc7f67f0/volumes" Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.507550 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2552ec58-e76a-4c17-ab79-ac237c6d972c","Type":"ContainerStarted","Data":"54c214ec4c566468ba2a61356766284b1e6ba62480de2dc42b4d6cbe0e1fbd9e"} Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.509485 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f98e1b46-07d0-44d9-810c-4a778d44837d","Type":"ContainerStarted","Data":"8d4ea541d45b8a8179144d083ee97d31e212e48ebe9900994f8dac67c2a9c24d"} Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.511380 4788 generic.go:334] "Generic (PLEG): container finished" podID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerID="7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240" exitCode=0 Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.511432 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" event={"ID":"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621","Type":"ContainerDied","Data":"7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240"} Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.513324 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc" event={"ID":"b899f552-09d4-4919-a3f1-79ff044cd435","Type":"ContainerStarted","Data":"4ca3bf6f8dff956ace5be82fca0fe653535a66446cc28f5662996375bf4cf004"} Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.513739 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-5hgrc" Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.534749 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"eba9b8bc-1fe1-4ba9-9521-a21c25bed6be","Type":"ContainerStarted","Data":"24a2b1a4485332b0b059c8ef49e555e9e3838e6db8926e70a536bfe6c7f127ef"} Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.535530 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.611324 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=5.095463154 podStartE2EDuration="1m0.611294969s" podCreationTimestamp="2025-12-11 09:39:57 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.517193416 +0000 UTC m=+1130.587973002" lastFinishedPulling="2025-12-11 09:40:56.033025231 +0000 UTC m=+1186.103804817" observedRunningTime="2025-12-11 09:40:57.604583789 +0000 UTC m=+1187.675363375" watchObservedRunningTime="2025-12-11 09:40:57.611294969 +0000 UTC m=+1187.682074555" Dec 11 09:40:57 crc kubenswrapper[4788]: E1211 09:40:57.626379 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7: Get \\\"https://quay.io/v2/podified-antelope-centos9/openstack-ovn-sb-db-server/blobs/sha256:98706c286da2c6fe28e9b8b1f32cd40bde3bda835fade711a62193fefd3575f7\\\": context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="428e76c9-65ed-434c-a25d-6bcd956b48d5" Dec 11 09:40:57 crc kubenswrapper[4788]: I1211 09:40:57.756347 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-5hgrc" 
podStartSLOduration=3.308733469 podStartE2EDuration="54.756311609s" podCreationTimestamp="2025-12-11 09:40:03 +0000 UTC" firstStartedPulling="2025-12-11 09:40:04.452314222 +0000 UTC m=+1134.523093808" lastFinishedPulling="2025-12-11 09:40:55.899892362 +0000 UTC m=+1185.970671948" observedRunningTime="2025-12-11 09:40:57.740373744 +0000 UTC m=+1187.811153330" watchObservedRunningTime="2025-12-11 09:40:57.756311609 +0000 UTC m=+1187.827091195" Dec 11 09:40:58 crc kubenswrapper[4788]: I1211 09:40:58.557117 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerStarted","Data":"512fd2cbdec642fe0e8d83e6ef7e1028976ab3008554b4f0c9736d8bf1910af3"} Dec 11 09:40:58 crc kubenswrapper[4788]: I1211 09:40:58.559728 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3858a264-6dc8-4a58-8e80-3d57649da896","Type":"ContainerStarted","Data":"e464b906aece9f5443b0aab8e6bbf7bbf82784b7d7ebe6d526fde0b9e5095b52"} Dec 11 09:40:58 crc kubenswrapper[4788]: I1211 09:40:58.561317 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerStarted","Data":"ecdda98068d7c12b5c293a43670f5aa25750a921ae152c57a3c48bdaf6f07e5d"} Dec 11 09:40:58 crc kubenswrapper[4788]: I1211 09:40:58.572250 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"428e76c9-65ed-434c-a25d-6bcd956b48d5","Type":"ContainerStarted","Data":"540ef2e01cd8cb361077102f2db6c5886e81113eab081249638a5470f0947a4e"} Dec 11 09:40:58 crc kubenswrapper[4788]: I1211 09:40:58.574585 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-gpspm" event={"ID":"f000044f-b0a7-417e-8278-5deb090b8105","Type":"ContainerStarted","Data":"c85a1421af2eac8478eff5ed54c8135a8dcab5a9eb3184f8ab096d058a10b7bc"} Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.584270 4788 generic.go:334] "Generic (PLEG): container finished" podID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerID="ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c" exitCode=0 Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.584384 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" event={"ID":"c71b12a7-3e8a-418f-9bbb-c95851134efd","Type":"ContainerDied","Data":"ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c"} Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.589679 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3858a264-6dc8-4a58-8e80-3d57649da896","Type":"ContainerStarted","Data":"e789df24109da92c80a506c43906c4ad022235c885389a242c351b6c78c17eb6"} Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.592738 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" event={"ID":"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621","Type":"ContainerStarted","Data":"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4"} Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.593090 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.630138 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=52.506597651 
podStartE2EDuration="54.630116068s" podCreationTimestamp="2025-12-11 09:40:05 +0000 UTC" firstStartedPulling="2025-12-11 09:40:54.79151057 +0000 UTC m=+1184.862290156" lastFinishedPulling="2025-12-11 09:40:56.915028987 +0000 UTC m=+1186.985808573" observedRunningTime="2025-12-11 09:40:59.626509976 +0000 UTC m=+1189.697289562" watchObservedRunningTime="2025-12-11 09:40:59.630116068 +0000 UTC m=+1189.700895654" Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.645463 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" podStartSLOduration=12.175694632 podStartE2EDuration="1m7.645442867s" podCreationTimestamp="2025-12-11 09:39:52 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.431491001 +0000 UTC m=+1130.502270587" lastFinishedPulling="2025-12-11 09:40:55.901239236 +0000 UTC m=+1185.972018822" observedRunningTime="2025-12-11 09:40:59.644733929 +0000 UTC m=+1189.715513515" watchObservedRunningTime="2025-12-11 09:40:59.645442867 +0000 UTC m=+1189.716222463" Dec 11 09:40:59 crc kubenswrapper[4788]: I1211 09:40:59.675304 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-gpspm" podStartSLOduration=53.838584708 podStartE2EDuration="54.675266554s" podCreationTimestamp="2025-12-11 09:40:05 +0000 UTC" firstStartedPulling="2025-12-11 09:40:55.196484729 +0000 UTC m=+1185.267264325" lastFinishedPulling="2025-12-11 09:40:56.033166585 +0000 UTC m=+1186.103946171" observedRunningTime="2025-12-11 09:40:59.663134556 +0000 UTC m=+1189.733914142" watchObservedRunningTime="2025-12-11 09:40:59.675266554 +0000 UTC m=+1189.746046140" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.078946 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.107117 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.108582 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.115369 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.142131 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.241331 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x2bg\" (UniqueName: \"kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.241406 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.241462 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.241515 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.241544 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.343194 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x2bg\" (UniqueName: \"kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.343348 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.343401 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") 
" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.343461 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.343489 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.344630 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.344788 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.344795 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.345251 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.364107 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x2bg\" (UniqueName: \"kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg\") pod \"dnsmasq-dns-8554648995-mp6nt\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.452212 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.610327 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" event={"ID":"c71b12a7-3e8a-418f-9bbb-c95851134efd","Type":"ContainerStarted","Data":"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a"} Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.610396 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.617519 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"428e76c9-65ed-434c-a25d-6bcd956b48d5","Type":"ContainerStarted","Data":"c8ebf055a549ff3ebb7c157d4e14956a21e426b30877fb047257a680b9fefb22"} Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.632570 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" podStartSLOduration=53.459823275 podStartE2EDuration="54.631792521s" podCreationTimestamp="2025-12-11 09:40:06 +0000 UTC" firstStartedPulling="2025-12-11 09:40:54.835156858 +0000 UTC m=+1184.905936444" lastFinishedPulling="2025-12-11 09:40:56.007126104 +0000 UTC m=+1186.077905690" observedRunningTime="2025-12-11 09:41:00.630050307 +0000 UTC m=+1190.700829893" watchObservedRunningTime="2025-12-11 09:41:00.631792521 +0000 UTC m=+1190.702572107" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.947462 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.004445767 podStartE2EDuration="59.947443313s" podCreationTimestamp="2025-12-11 09:40:01 +0000 UTC" firstStartedPulling="2025-12-11 09:40:04.046356639 +0000 UTC m=+1134.117136215" lastFinishedPulling="2025-12-11 09:40:59.989354175 +0000 UTC m=+1190.060133761" observedRunningTime="2025-12-11 09:41:00.655440131 +0000 UTC m=+1190.726219737" watchObservedRunningTime="2025-12-11 09:41:00.947443313 +0000 UTC m=+1191.018222899" Dec 11 09:41:00 crc kubenswrapper[4788]: I1211 09:41:00.952667 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:00 crc kubenswrapper[4788]: W1211 09:41:00.957081 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd782ec20_acdd_4089_921f_dc2a69a2c2d6.slice/crio-0c01fb99c89d3dc36daad1d1157c6a73a8a62a30cd8fc8c30183a682f853f893 WatchSource:0}: Error finding container 0c01fb99c89d3dc36daad1d1157c6a73a8a62a30cd8fc8c30183a682f853f893: Status 404 returned error can't find the container with id 0c01fb99c89d3dc36daad1d1157c6a73a8a62a30cd8fc8c30183a682f853f893 Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.126365 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.181820 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.627540 4788 generic.go:334] "Generic (PLEG): container finished" podID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerID="07f765b171f67ad63821ab32950f22ed6e4fac7ca855b86dad379fd8cadfbc82" exitCode=0 Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.627632 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-8554648995-mp6nt" event={"ID":"d782ec20-acdd-4089-921f-dc2a69a2c2d6","Type":"ContainerDied","Data":"07f765b171f67ad63821ab32950f22ed6e4fac7ca855b86dad379fd8cadfbc82"} Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.627918 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mp6nt" event={"ID":"d782ec20-acdd-4089-921f-dc2a69a2c2d6","Type":"ContainerStarted","Data":"0c01fb99c89d3dc36daad1d1157c6a73a8a62a30cd8fc8c30183a682f853f893"} Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.630829 4788 generic.go:334] "Generic (PLEG): container finished" podID="f98e1b46-07d0-44d9-810c-4a778d44837d" containerID="8d4ea541d45b8a8179144d083ee97d31e212e48ebe9900994f8dac67c2a9c24d" exitCode=0 Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.630953 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f98e1b46-07d0-44d9-810c-4a778d44837d","Type":"ContainerDied","Data":"8d4ea541d45b8a8179144d083ee97d31e212e48ebe9900994f8dac67c2a9c24d"} Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.631242 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="dnsmasq-dns" containerID="cri-o://205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4" gracePeriod=10 Dec 11 09:41:01 crc kubenswrapper[4788]: I1211 09:41:01.631596 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.058572 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.164895 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.186326 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config\") pod \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.186649 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79tj6\" (UniqueName: \"kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6\") pod \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.186712 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc\") pod \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\" (UID: \"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621\") " Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.206594 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6" (OuterVolumeSpecName: "kube-api-access-79tj6") pod "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" (UID: "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621"). InnerVolumeSpecName "kube-api-access-79tj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.254210 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" (UID: "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.258667 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config" (OuterVolumeSpecName: "config") pod "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" (UID: "35f27cf1-e2d1-4f41-9b9b-20f0da6b2621"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.289785 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79tj6\" (UniqueName: \"kubernetes.io/projected/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-kube-api-access-79tj6\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.289844 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.289855 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.646196 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f98e1b46-07d0-44d9-810c-4a778d44837d","Type":"ContainerStarted","Data":"af7f6a521b54a9a8fd4f90d01aae36d6be05b7ca982835f31e0c1c940e1fd89c"} Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.652714 4788 generic.go:334] "Generic (PLEG): container finished" podID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerID="205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4" exitCode=0 Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.652857 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" event={"ID":"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621","Type":"ContainerDied","Data":"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4"} Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.652901 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" event={"ID":"35f27cf1-e2d1-4f41-9b9b-20f0da6b2621","Type":"ContainerDied","Data":"d89bd5d88eb86cfee0b027312d78adcc3afe3b0e2facddfb3d670bf335709def"} Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.652925 4788 scope.go:117] "RemoveContainer" containerID="205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.652886 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-zs5cl" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.673617 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mp6nt" event={"ID":"d782ec20-acdd-4089-921f-dc2a69a2c2d6","Type":"ContainerStarted","Data":"8fa007ce452a53d4942867d43da868d9128e69120b2ce0e05d526b5d5b02f3ec"} Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.673707 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.676018 4788 generic.go:334] "Generic (PLEG): container finished" podID="2552ec58-e76a-4c17-ab79-ac237c6d972c" containerID="54c214ec4c566468ba2a61356766284b1e6ba62480de2dc42b4d6cbe0e1fbd9e" exitCode=0 Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.676129 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2552ec58-e76a-4c17-ab79-ac237c6d972c","Type":"ContainerDied","Data":"54c214ec4c566468ba2a61356766284b1e6ba62480de2dc42b4d6cbe0e1fbd9e"} Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.685083 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.491777028 podStartE2EDuration="1m7.685052414s" podCreationTimestamp="2025-12-11 09:39:55 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.487122153 +0000 UTC m=+1130.557901739" lastFinishedPulling="2025-12-11 09:40:52.680397539 +0000 UTC m=+1182.751177125" observedRunningTime="2025-12-11 09:41:02.682063258 +0000 UTC m=+1192.752842844" watchObservedRunningTime="2025-12-11 09:41:02.685052414 +0000 UTC m=+1192.755832000" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.700656 4788 scope.go:117] "RemoveContainer" containerID="7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.712209 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.727322 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-zs5cl"] Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.727463 4788 scope.go:117] "RemoveContainer" containerID="205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4" Dec 11 09:41:02 crc kubenswrapper[4788]: E1211 09:41:02.727791 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4\": container with ID starting with 205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4 not found: ID does not exist" containerID="205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.727822 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4"} err="failed to get container status \"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4\": rpc error: code = NotFound desc = could not find container \"205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4\": container with ID starting with 205d1602ece29969199d5f4e0ee129f61ecec9bc63bcb55226c845b390ef60f4 not found: ID does not exist" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.727870 
4788 scope.go:117] "RemoveContainer" containerID="7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240" Dec 11 09:41:02 crc kubenswrapper[4788]: E1211 09:41:02.728039 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240\": container with ID starting with 7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240 not found: ID does not exist" containerID="7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.728054 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240"} err="failed to get container status \"7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240\": rpc error: code = NotFound desc = could not find container \"7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240\": container with ID starting with 7a1917b452fd8fb31c96b3487fc82f36e7a2c8b4ed0280d9e109bf376d2d7240 not found: ID does not exist" Dec 11 09:41:02 crc kubenswrapper[4788]: I1211 09:41:02.729839 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-mp6nt" podStartSLOduration=2.72981747 podStartE2EDuration="2.72981747s" podCreationTimestamp="2025-12-11 09:41:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:02.716625955 +0000 UTC m=+1192.787405541" watchObservedRunningTime="2025-12-11 09:41:02.72981747 +0000 UTC m=+1192.800597056" Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.365901 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.366449 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.416192 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.623457 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.685793 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"2552ec58-e76a-4c17-ab79-ac237c6d972c","Type":"ContainerStarted","Data":"9ac5c2a1a11364814a3e2505f2b43d2a29e2c81fa7776d67d829255aa4e0c213"} Dec 11 09:41:03 crc kubenswrapper[4788]: I1211 09:41:03.720823 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=14.889802208999999 podStartE2EDuration="1m9.720802992s" podCreationTimestamp="2025-12-11 09:39:54 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.445124217 +0000 UTC m=+1130.515903803" lastFinishedPulling="2025-12-11 09:40:55.276125 +0000 UTC m=+1185.346904586" observedRunningTime="2025-12-11 09:41:03.714385009 +0000 UTC m=+1193.785164615" watchObservedRunningTime="2025-12-11 09:41:03.720802992 +0000 UTC m=+1193.791582578" Dec 11 09:41:04 crc kubenswrapper[4788]: I1211 09:41:04.510816 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" 
path="/var/lib/kubelet/pods/35f27cf1-e2d1-4f41-9b9b-20f0da6b2621/volumes" Dec 11 09:41:05 crc kubenswrapper[4788]: I1211 09:41:05.757989 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7474g" event={"ID":"ffcb62ce-c938-498b-9026-8fe40512245a","Type":"ContainerStarted","Data":"87f977a23c631794f11e5949a8c94ec743d5194e4a3f4f939f1ff9f6118b75bb"} Dec 11 09:41:06 crc kubenswrapper[4788]: I1211 09:41:06.467460 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:41:06 crc kubenswrapper[4788]: I1211 09:41:06.767999 4788 generic.go:334] "Generic (PLEG): container finished" podID="ffcb62ce-c938-498b-9026-8fe40512245a" containerID="87f977a23c631794f11e5949a8c94ec743d5194e4a3f4f939f1ff9f6118b75bb" exitCode=0 Dec 11 09:41:06 crc kubenswrapper[4788]: I1211 09:41:06.768047 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7474g" event={"ID":"ffcb62ce-c938-498b-9026-8fe40512245a","Type":"ContainerDied","Data":"87f977a23c631794f11e5949a8c94ec743d5194e4a3f4f939f1ff9f6118b75bb"} Dec 11 09:41:07 crc kubenswrapper[4788]: I1211 09:41:07.781570 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7474g" event={"ID":"ffcb62ce-c938-498b-9026-8fe40512245a","Type":"ContainerStarted","Data":"03c0ed7c6a3d42b4668e8ed66678bb1256b267c5161bf44e75202f186c22889c"} Dec 11 09:41:07 crc kubenswrapper[4788]: I1211 09:41:07.781912 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7474g" event={"ID":"ffcb62ce-c938-498b-9026-8fe40512245a","Type":"ContainerStarted","Data":"0b8ca081dc7a3f53471f20928aa31b217559ded17205dfeaf239fe161da8d7bd"} Dec 11 09:41:07 crc kubenswrapper[4788]: I1211 09:41:07.783153 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:41:07 crc kubenswrapper[4788]: I1211 09:41:07.783194 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.210857 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-7474g" podStartSLOduration=13.518524672 podStartE2EDuration="1m5.210838432s" podCreationTimestamp="2025-12-11 09:40:03 +0000 UTC" firstStartedPulling="2025-12-11 09:40:13.746966097 +0000 UTC m=+1143.817745683" lastFinishedPulling="2025-12-11 09:41:05.439279837 +0000 UTC m=+1195.510059443" observedRunningTime="2025-12-11 09:41:08.207689032 +0000 UTC m=+1198.278468618" watchObservedRunningTime="2025-12-11 09:41:08.210838432 +0000 UTC m=+1198.281618018" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.422048 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.583437 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 11 09:41:08 crc kubenswrapper[4788]: E1211 09:41:08.584978 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="init" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.585000 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="init" Dec 11 09:41:08 crc kubenswrapper[4788]: E1211 09:41:08.585033 4788 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="dnsmasq-dns" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.585040 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="dnsmasq-dns" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.585241 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f27cf1-e2d1-4f41-9b9b-20f0da6b2621" containerName="dnsmasq-dns" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.586259 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.588855 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-5djvs" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.589532 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.595695 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.595841 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.600395 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.709837 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.709889 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqvzk\" (UniqueName: \"kubernetes.io/projected/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-kube-api-access-cqvzk\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.709983 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.710014 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.710038 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-scripts\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.710080 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-config\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.710140 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.736100 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.736172 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.811685 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.812152 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.812195 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqvzk\" (UniqueName: \"kubernetes.io/projected/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-kube-api-access-cqvzk\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.812274 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.813082 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.813151 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.813217 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-scripts\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.813401 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-config\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.814656 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-config\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.814935 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-scripts\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.819967 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.823065 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.829833 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqvzk\" (UniqueName: \"kubernetes.io/projected/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-kube-api-access-cqvzk\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.830255 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb\") " pod="openstack/ovn-northd-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.847673 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.848997 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 11 09:41:08 crc kubenswrapper[4788]: I1211 09:41:08.913148 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.400670 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.757063 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.757331 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-mp6nt" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="dnsmasq-dns" containerID="cri-o://8fa007ce452a53d4942867d43da868d9128e69120b2ce0e05d526b5d5b02f3ec" gracePeriod=10 Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.761014 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.807349 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb","Type":"ContainerStarted","Data":"416e38561998cc5f4cfd284cd47734996d9bfe2f57101da97e16fcc72fda2852"} Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.811048 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.814338 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.830324 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.830722 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.830753 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.830791 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwsbl\" (UniqueName: \"kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.830825 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " 
pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.833640 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.933394 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.933844 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.933984 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.934124 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwsbl\" (UniqueName: \"kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.934298 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.935357 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.935480 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.935896 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.936766 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: 
\"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:09 crc kubenswrapper[4788]: I1211 09:41:09.976801 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwsbl\" (UniqueName: \"kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl\") pod \"dnsmasq-dns-b8fbc5445-hk97f\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.156114 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.453590 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-mp6nt" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.844123 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:41:10 crc kubenswrapper[4788]: W1211 09:41:10.844862 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc415f833_a0e0_428b_9ece_6c5f617dfc18.slice/crio-296497b24e1c14a78529438789cece2933f3651afbc804c4288dd6aa19f8bcef WatchSource:0}: Error finding container 296497b24e1c14a78529438789cece2933f3651afbc804c4288dd6aa19f8bcef: Status 404 returned error can't find the container with id 296497b24e1c14a78529438789cece2933f3651afbc804c4288dd6aa19f8bcef Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.890772 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.899984 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.905752 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-v4gvc" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.905955 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.907028 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.907207 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 11 09:41:10 crc kubenswrapper[4788]: I1211 09:41:10.907952 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.063921 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.064426 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.064477 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76r2c\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-kube-api-access-76r2c\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.064728 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-lock\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.064843 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-cache\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166170 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166253 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166295 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-76r2c\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-kube-api-access-76r2c\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166386 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-lock\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166448 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-cache\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.166458 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.166474 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.166524 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:11.666504299 +0000 UTC m=+1201.737283885 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.166575 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.167186 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-lock\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.167265 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/874f552a-7856-439c-937c-a87d9c15305c-cache\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.188147 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76r2c\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-kube-api-access-76r2c\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.191928 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.472644 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-m5bdb"] Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.474122 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.481583 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.481654 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.481843 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.537327 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m5bdb"] Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575164 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js7s8\" (UniqueName: \"kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575285 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575444 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575483 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575517 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575607 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " 
pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.575642 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.677763 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.677920 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.677977 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.678038 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.678063 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.678148 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.678206 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js7s8\" (UniqueName: \"kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.678267 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.678756 4788 
projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.678868 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: E1211 09:41:11.679006 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:12.678982666 +0000 UTC m=+1202.749762302 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.679924 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.680085 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.679043 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.685966 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.700277 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.701697 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.705701 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js7s8\" (UniqueName: \"kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8\") pod \"swift-ring-rebalance-m5bdb\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " 
pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.825109 4788 generic.go:334] "Generic (PLEG): container finished" podID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerID="8fa007ce452a53d4942867d43da868d9128e69120b2ce0e05d526b5d5b02f3ec" exitCode=0 Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.825180 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mp6nt" event={"ID":"d782ec20-acdd-4089-921f-dc2a69a2c2d6","Type":"ContainerDied","Data":"8fa007ce452a53d4942867d43da868d9128e69120b2ce0e05d526b5d5b02f3ec"} Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.826126 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" event={"ID":"c415f833-a0e0-428b-9ece-6c5f617dfc18","Type":"ContainerStarted","Data":"296497b24e1c14a78529438789cece2933f3651afbc804c4288dd6aa19f8bcef"} Dec 11 09:41:11 crc kubenswrapper[4788]: I1211 09:41:11.854356 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:41:12 crc kubenswrapper[4788]: I1211 09:41:12.696791 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:12 crc kubenswrapper[4788]: E1211 09:41:12.697085 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:12 crc kubenswrapper[4788]: E1211 09:41:12.697885 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:12 crc kubenswrapper[4788]: E1211 09:41:12.697956 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:14.697932977 +0000 UTC m=+1204.768712563 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:12 crc kubenswrapper[4788]: I1211 09:41:12.854423 4788 generic.go:334] "Generic (PLEG): container finished" podID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerID="bc1c19a6197e7b634b7b16fc3dc44ecbbc99c3bde1af082cb275a256c39e134c" exitCode=0 Dec 11 09:41:12 crc kubenswrapper[4788]: I1211 09:41:12.854510 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" event={"ID":"c415f833-a0e0-428b-9ece-6c5f617dfc18","Type":"ContainerDied","Data":"bc1c19a6197e7b634b7b16fc3dc44ecbbc99c3bde1af082cb275a256c39e134c"} Dec 11 09:41:12 crc kubenswrapper[4788]: I1211 09:41:12.978116 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-m5bdb"] Dec 11 09:41:13 crc kubenswrapper[4788]: W1211 09:41:13.241212 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f5299be_0ac1_4048_b2aa_6a07ce5c30e1.slice/crio-64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7 WatchSource:0}: Error finding container 64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7: Status 404 returned error can't find the container with id 64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7 Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.351986 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.355921 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.457639 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="f98e1b46-07d0-44d9-810c-4a778d44837d" containerName="galera" probeResult="failure" output=< Dec 11 09:41:13 crc kubenswrapper[4788]: wsrep_local_state_comment (Joined) differs from Synced Dec 11 09:41:13 crc kubenswrapper[4788]: > Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.478797 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config\") pod \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.478888 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x2bg\" (UniqueName: \"kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg\") pod \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.478946 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb\") pod \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.479087 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc\") pod \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.479149 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb\") pod \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\" (UID: \"d782ec20-acdd-4089-921f-dc2a69a2c2d6\") " Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.487911 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg" (OuterVolumeSpecName: "kube-api-access-2x2bg") pod "d782ec20-acdd-4089-921f-dc2a69a2c2d6" (UID: "d782ec20-acdd-4089-921f-dc2a69a2c2d6"). InnerVolumeSpecName "kube-api-access-2x2bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.538045 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config" (OuterVolumeSpecName: "config") pod "d782ec20-acdd-4089-921f-dc2a69a2c2d6" (UID: "d782ec20-acdd-4089-921f-dc2a69a2c2d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.550695 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d782ec20-acdd-4089-921f-dc2a69a2c2d6" (UID: "d782ec20-acdd-4089-921f-dc2a69a2c2d6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.559553 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d782ec20-acdd-4089-921f-dc2a69a2c2d6" (UID: "d782ec20-acdd-4089-921f-dc2a69a2c2d6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.560799 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d782ec20-acdd-4089-921f-dc2a69a2c2d6" (UID: "d782ec20-acdd-4089-921f-dc2a69a2c2d6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.583453 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.583488 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.583507 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.583518 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x2bg\" (UniqueName: \"kubernetes.io/projected/d782ec20-acdd-4089-921f-dc2a69a2c2d6-kube-api-access-2x2bg\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.583527 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d782ec20-acdd-4089-921f-dc2a69a2c2d6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.864875 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-mp6nt" event={"ID":"d782ec20-acdd-4089-921f-dc2a69a2c2d6","Type":"ContainerDied","Data":"0c01fb99c89d3dc36daad1d1157c6a73a8a62a30cd8fc8c30183a682f853f893"} Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.864896 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-mp6nt" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.865381 4788 scope.go:117] "RemoveContainer" containerID="8fa007ce452a53d4942867d43da868d9128e69120b2ce0e05d526b5d5b02f3ec" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.868322 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" event={"ID":"c415f833-a0e0-428b-9ece-6c5f617dfc18","Type":"ContainerStarted","Data":"83b2aaed2447d8eaca6eb6350a8212f4fef4162bef8f78c63e118f1be6b93791"} Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.869555 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.871460 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m5bdb" event={"ID":"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1","Type":"ContainerStarted","Data":"64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7"} Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.901053 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podStartSLOduration=4.901024342 podStartE2EDuration="4.901024342s" podCreationTimestamp="2025-12-11 09:41:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:13.899007891 +0000 UTC m=+1203.969787487" watchObservedRunningTime="2025-12-11 09:41:13.901024342 +0000 UTC m=+1203.971803928" Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.943523 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:13 crc kubenswrapper[4788]: I1211 09:41:13.950484 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-mp6nt"] Dec 11 09:41:14 crc kubenswrapper[4788]: I1211 09:41:14.521409 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" path="/var/lib/kubelet/pods/d782ec20-acdd-4089-921f-dc2a69a2c2d6/volumes" Dec 11 09:41:14 crc kubenswrapper[4788]: I1211 09:41:14.708727 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:14 crc kubenswrapper[4788]: E1211 09:41:14.708946 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:14 crc kubenswrapper[4788]: E1211 09:41:14.709004 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:14 crc kubenswrapper[4788]: E1211 09:41:14.709069 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:18.70904943 +0000 UTC m=+1208.779829016 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:18 crc kubenswrapper[4788]: I1211 09:41:18.789220 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:18 crc kubenswrapper[4788]: E1211 09:41:18.790202 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:18 crc kubenswrapper[4788]: E1211 09:41:18.790247 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:18 crc kubenswrapper[4788]: E1211 09:41:18.790308 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:26.790287815 +0000 UTC m=+1216.861067401 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:18 crc kubenswrapper[4788]: I1211 09:41:18.828116 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 11 09:41:19 crc kubenswrapper[4788]: I1211 09:41:19.129025 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 11 09:41:19 crc kubenswrapper[4788]: I1211 09:41:19.223487 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="2552ec58-e76a-4c17-ab79-ac237c6d972c" containerName="galera" probeResult="failure" output=< Dec 11 09:41:19 crc kubenswrapper[4788]: wsrep_local_state_comment (Joined) differs from Synced Dec 11 09:41:19 crc kubenswrapper[4788]: > Dec 11 09:41:19 crc kubenswrapper[4788]: I1211 09:41:19.426909 4788 scope.go:117] "RemoveContainer" containerID="07f765b171f67ad63821ab32950f22ed6e4fac7ca855b86dad379fd8cadfbc82" Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.158600 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.216691 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.217675 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="dnsmasq-dns" containerID="cri-o://3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a" gracePeriod=10 Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.895709 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.953730 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb","Type":"ContainerStarted","Data":"c9d44f4b27f9d24da426c182d2fa0a24ecbf47b2ab3496d67b82e90eaa916bc7"} Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.957764 4788 generic.go:334] "Generic (PLEG): container finished" podID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerID="3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a" exitCode=0 Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.957819 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" event={"ID":"c71b12a7-3e8a-418f-9bbb-c95851134efd","Type":"ContainerDied","Data":"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a"} Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.957845 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" event={"ID":"c71b12a7-3e8a-418f-9bbb-c95851134efd","Type":"ContainerDied","Data":"bbf6c1d6565bfcd703f293cfc8a23418e924c06beb0f125c8a009bfa3475d77a"} Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.957865 4788 scope.go:117] "RemoveContainer" containerID="3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a" Dec 11 09:41:20 crc kubenswrapper[4788]: I1211 09:41:20.957998 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-f7mj4" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.013830 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc\") pod \"c71b12a7-3e8a-418f-9bbb-c95851134efd\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.013983 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config\") pod \"c71b12a7-3e8a-418f-9bbb-c95851134efd\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.014163 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb\") pod \"c71b12a7-3e8a-418f-9bbb-c95851134efd\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.014219 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz5zf\" (UniqueName: \"kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf\") pod \"c71b12a7-3e8a-418f-9bbb-c95851134efd\" (UID: \"c71b12a7-3e8a-418f-9bbb-c95851134efd\") " Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.021834 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf" (OuterVolumeSpecName: "kube-api-access-qz5zf") pod "c71b12a7-3e8a-418f-9bbb-c95851134efd" (UID: "c71b12a7-3e8a-418f-9bbb-c95851134efd"). InnerVolumeSpecName "kube-api-access-qz5zf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.028477 4788 scope.go:117] "RemoveContainer" containerID="ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.100849 4788 scope.go:117] "RemoveContainer" containerID="3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a" Dec 11 09:41:21 crc kubenswrapper[4788]: E1211 09:41:21.101654 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a\": container with ID starting with 3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a not found: ID does not exist" containerID="3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.101723 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a"} err="failed to get container status \"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a\": rpc error: code = NotFound desc = could not find container \"3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a\": container with ID starting with 3036e9f8b3d9bf7c17b50833defabcc2310648b2bd76ac6e2923eac2d7bd090a not found: ID does not exist" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.101768 4788 scope.go:117] "RemoveContainer" containerID="ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c" Dec 11 09:41:21 crc kubenswrapper[4788]: E1211 09:41:21.102628 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c\": container with ID starting with ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c not found: ID does not exist" containerID="ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.102655 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c"} err="failed to get container status \"ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c\": rpc error: code = NotFound desc = could not find container \"ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c\": container with ID starting with ced4a3b46624d16b793a89d60ec34421eb9df46b45ecc720e832ad7cceac2d2c not found: ID does not exist" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.102987 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c71b12a7-3e8a-418f-9bbb-c95851134efd" (UID: "c71b12a7-3e8a-418f-9bbb-c95851134efd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.119969 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.120020 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz5zf\" (UniqueName: \"kubernetes.io/projected/c71b12a7-3e8a-418f-9bbb-c95851134efd-kube-api-access-qz5zf\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.151346 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config" (OuterVolumeSpecName: "config") pod "c71b12a7-3e8a-418f-9bbb-c95851134efd" (UID: "c71b12a7-3e8a-418f-9bbb-c95851134efd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.157807 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c71b12a7-3e8a-418f-9bbb-c95851134efd" (UID: "c71b12a7-3e8a-418f-9bbb-c95851134efd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.222460 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.222507 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c71b12a7-3e8a-418f-9bbb-c95851134efd-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.350365 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.357720 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-f7mj4"] Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.971006 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb","Type":"ContainerStarted","Data":"e15f6c7694c2116510c61724544b7a4d4cdd4ff29a31b719d31ffe0cfa3d785b"} Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.971643 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.973223 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e","Type":"ContainerStarted","Data":"b0d88b6b80367a63483f41256f0fdf3fc47fd15107ea5357bcdd042d59785db2"} Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.973811 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 11 09:41:21 crc kubenswrapper[4788]: I1211 09:41:21.997932 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.152625037 podStartE2EDuration="13.997906186s" podCreationTimestamp="2025-12-11 09:41:08 +0000 UTC" firstStartedPulling="2025-12-11 09:41:09.422914055 
+0000 UTC m=+1199.493693641" lastFinishedPulling="2025-12-11 09:41:20.268195204 +0000 UTC m=+1210.338974790" observedRunningTime="2025-12-11 09:41:21.997146987 +0000 UTC m=+1212.067926573" watchObservedRunningTime="2025-12-11 09:41:21.997906186 +0000 UTC m=+1212.068685772" Dec 11 09:41:22 crc kubenswrapper[4788]: I1211 09:41:22.016399 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.702721906 podStartE2EDuration="1m23.016381615s" podCreationTimestamp="2025-12-11 09:39:59 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.600509651 +0000 UTC m=+1130.671289237" lastFinishedPulling="2025-12-11 09:41:20.91416935 +0000 UTC m=+1210.984948946" observedRunningTime="2025-12-11 09:41:22.016161919 +0000 UTC m=+1212.086941505" watchObservedRunningTime="2025-12-11 09:41:22.016381615 +0000 UTC m=+1212.087161201" Dec 11 09:41:22 crc kubenswrapper[4788]: I1211 09:41:22.507919 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" path="/var/lib/kubelet/pods/c71b12a7-3e8a-418f-9bbb-c95851134efd/volumes" Dec 11 09:41:26 crc kubenswrapper[4788]: I1211 09:41:26.833114 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:26 crc kubenswrapper[4788]: E1211 09:41:26.833357 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:26 crc kubenswrapper[4788]: E1211 09:41:26.833761 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:26 crc kubenswrapper[4788]: E1211 09:41:26.833817 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:41:42.833802523 +0000 UTC m=+1232.904582109 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:28 crc kubenswrapper[4788]: I1211 09:41:28.920020 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-5hgrc" podUID="b899f552-09d4-4919-a3f1-79ff044cd435" containerName="ovn-controller" probeResult="failure" output=< Dec 11 09:41:28 crc kubenswrapper[4788]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 11 09:41:28 crc kubenswrapper[4788]: > Dec 11 09:41:28 crc kubenswrapper[4788]: I1211 09:41:28.928730 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 11 09:41:29 crc kubenswrapper[4788]: I1211 09:41:29.776746 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 11 09:41:31 crc kubenswrapper[4788]: I1211 09:41:31.053267 4788 generic.go:334] "Generic (PLEG): container finished" podID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerID="512fd2cbdec642fe0e8d83e6ef7e1028976ab3008554b4f0c9736d8bf1910af3" exitCode=0 Dec 11 09:41:31 crc kubenswrapper[4788]: I1211 09:41:31.053290 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerDied","Data":"512fd2cbdec642fe0e8d83e6ef7e1028976ab3008554b4f0c9736d8bf1910af3"} Dec 11 09:41:31 crc kubenswrapper[4788]: I1211 09:41:31.059997 4788 generic.go:334] "Generic (PLEG): container finished" podID="ef72aa19-1387-4180-957c-4bfec95e5562" containerID="ecdda98068d7c12b5c293a43670f5aa25750a921ae152c57a3c48bdaf6f07e5d" exitCode=0 Dec 11 09:41:31 crc kubenswrapper[4788]: I1211 09:41:31.060043 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerDied","Data":"ecdda98068d7c12b5c293a43670f5aa25750a921ae152c57a3c48bdaf6f07e5d"} Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.900414 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-9a29-account-create-update-qv8wk"] Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.902396 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.902496 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.902581 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="init" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.902645 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="init" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.902705 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="init" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.902767 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="init" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.902868 4788 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.902958 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.903316 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c71b12a7-3e8a-418f-9bbb-c95851134efd" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.903425 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="d782ec20-acdd-4089-921f-dc2a69a2c2d6" containerName="dnsmasq-dns" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.904125 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.907217 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.909883 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-9a29-account-create-update-qv8wk"] Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.929766 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-jl2kd"] Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.934152 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.945809 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jl2kd"] Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.949172 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-5hgrc" podUID="b899f552-09d4-4919-a3f1-79ff044cd435" containerName="ovn-controller" probeResult="failure" output=< Dec 11 09:41:33 crc kubenswrapper[4788]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 11 09:41:33 crc kubenswrapper[4788]: > Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.965217 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:33 crc kubenswrapper[4788]: I1211 09:41:33.965662 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmfmg\" (UniqueName: \"kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.991749 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.992057 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:swift-ring-rebalance,Image:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,Command:[/usr/local/bin/swift-ring-tool all],Args:[],WorkingDir:/etc/swift,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CM_NAME,Value:swift-ring-files,ValueFrom:nil,},EnvVar{Name:NAMESPACE,Value:openstack,ValueFrom:nil,},EnvVar{Name:OWNER_APIVERSION,Value:swift.openstack.org/v1beta1,ValueFrom:nil,},EnvVar{Name:OWNER_KIND,Value:SwiftRing,ValueFrom:nil,},EnvVar{Name:OWNER_NAME,Value:swift-ring,ValueFrom:nil,},EnvVar{Name:OWNER_UID,Value:18d7b6bd-c4cd-48c0-84ae-75643455ca8c,ValueFrom:nil,},EnvVar{Name:SWIFT_MIN_PART_HOURS,Value:1,ValueFrom:nil,},EnvVar{Name:SWIFT_PART_POWER,Value:10,ValueFrom:nil,},EnvVar{Name:SWIFT_REPLICAS,Value:1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/swift-ring-tool,SubPath:swift-ring-tool,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:swiftconf,ReadOnly:true,MountPath:/etc/swift/swift.conf,SubPath:swift.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ring-data-devices,ReadOnly:true,MountPath:/var/lib/config-data/ring-devices,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dispersionconf,ReadOnly:true,MountPath:/etc/swift/dispersion.conf,SubPath:dispersion.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-js7s8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-ring-rebalance-m5bdb_openstack(6f5299be-0ac1-4048-b2aa-6a07ce5c30e1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:41:33 crc kubenswrapper[4788]: E1211 09:41:33.993937 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/swift-ring-rebalance-m5bdb" podUID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.021619 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.067222 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.067317 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.067375 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmfmg\" (UniqueName: \"kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.067547 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g86zz\" (UniqueName: \"kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.068597 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.090970 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmfmg\" (UniqueName: \"kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg\") pod \"glance-9a29-account-create-update-qv8wk\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:34 crc kubenswrapper[4788]: E1211 09:41:34.103402 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified\\\"\"" pod="openstack/swift-ring-rebalance-m5bdb" podUID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.172889 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g86zz\" (UniqueName: \"kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.173543 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: 
I1211 09:41:34.174192 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.197009 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g86zz\" (UniqueName: \"kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz\") pod \"glance-db-create-jl2kd\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.234058 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:34 crc kubenswrapper[4788]: I1211 09:41:34.264219 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.114590 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerStarted","Data":"0f19bd5a6a95b298d169849946fedcf95c25c2983d7367c81b8e61cc54a61939"} Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.116504 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.123113 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerStarted","Data":"26eb33385d08486834954168c890727574be50e64a22de92cf4ff8c02dac8bdd"} Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.124096 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.154377 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371933.700418 podStartE2EDuration="1m43.154357034s" podCreationTimestamp="2025-12-11 09:39:52 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.478787761 +0000 UTC m=+1130.549567347" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:35.148546686 +0000 UTC m=+1225.219326272" watchObservedRunningTime="2025-12-11 09:41:35.154357034 +0000 UTC m=+1225.225136620" Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.180562 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=49.987992672 podStartE2EDuration="1m42.180541159s" podCreationTimestamp="2025-12-11 09:39:53 +0000 UTC" firstStartedPulling="2025-12-11 09:40:00.487631616 +0000 UTC m=+1130.558411202" lastFinishedPulling="2025-12-11 09:40:52.680180103 +0000 UTC m=+1182.750959689" observedRunningTime="2025-12-11 09:41:35.173757036 +0000 UTC m=+1225.244536642" watchObservedRunningTime="2025-12-11 09:41:35.180541159 +0000 UTC m=+1225.251320755" Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.353191 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jl2kd"] Dec 11 09:41:35 crc kubenswrapper[4788]: I1211 09:41:35.617353 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/glance-9a29-account-create-update-qv8wk"] Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.131532 4788 generic.go:334] "Generic (PLEG): container finished" podID="776102e1-f9bb-46db-bf9e-b3171a8d64fa" containerID="ec51dad2a93538ebcb2573c2a4b32c519ff312933347e7af2b685035037750c8" exitCode=0 Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.131616 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9a29-account-create-update-qv8wk" event={"ID":"776102e1-f9bb-46db-bf9e-b3171a8d64fa","Type":"ContainerDied","Data":"ec51dad2a93538ebcb2573c2a4b32c519ff312933347e7af2b685035037750c8"} Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.131646 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9a29-account-create-update-qv8wk" event={"ID":"776102e1-f9bb-46db-bf9e-b3171a8d64fa","Type":"ContainerStarted","Data":"fbacaacd2e75e653b28ba343a9c997c098ab2c1eb44095b6cbdb2f87436d6e3a"} Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.134558 4788 generic.go:334] "Generic (PLEG): container finished" podID="8d15af45-a8f2-4689-a633-82abfcd03bae" containerID="ff25c34ffa144204887874c798aa10d9d501f8ab34e811a4b67e67006e1559c3" exitCode=0 Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.134616 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jl2kd" event={"ID":"8d15af45-a8f2-4689-a633-82abfcd03bae","Type":"ContainerDied","Data":"ff25c34ffa144204887874c798aa10d9d501f8ab34e811a4b67e67006e1559c3"} Dec 11 09:41:36 crc kubenswrapper[4788]: I1211 09:41:36.134717 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jl2kd" event={"ID":"8d15af45-a8f2-4689-a633-82abfcd03bae","Type":"ContainerStarted","Data":"7d73f5f0ec7ec4eef0b2fe4af437eb3e1000c49d89f7d29161f4a0c0e661bd11"} Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.347367 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-s98g2"] Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.350507 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.356402 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-s98g2"] Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.398979 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5vzx\" (UniqueName: \"kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.399040 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.500284 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5vzx\" (UniqueName: \"kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.500347 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.501570 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.509230 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5ce6-account-create-update-4dlzg"] Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.510401 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.514725 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.542127 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5vzx\" (UniqueName: \"kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx\") pod \"keystone-db-create-s98g2\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.543589 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5ce6-account-create-update-4dlzg"] Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.604959 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.605503 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8w7\" (UniqueName: \"kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.966403 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.966462 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8w7\" (UniqueName: \"kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.967496 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:37 crc kubenswrapper[4788]: I1211 09:41:37.967605 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.021046 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb8w7\" (UniqueName: \"kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7\") pod \"keystone-5ce6-account-create-update-4dlzg\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.030120 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-mlxpj"] Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.031901 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.060735 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-26ad-account-create-update-q7plk"] Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.062816 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.065367 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.078404 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mlxpj"] Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.100387 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-26ad-account-create-update-q7plk"] Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.133754 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.169972 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.170064 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcrnl\" (UniqueName: \"kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.170112 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.170166 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7lht\" (UniqueName: \"kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.276711 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.276825 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcrnl\" (UniqueName: \"kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.276871 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.276919 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7lht\" (UniqueName: \"kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.278795 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.278904 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.301929 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcrnl\" (UniqueName: \"kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl\") pod \"placement-26ad-account-create-update-q7plk\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.302678 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7lht\" (UniqueName: \"kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht\") pod \"placement-db-create-mlxpj\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.303828 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.322520 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.383975 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g86zz\" (UniqueName: \"kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz\") pod \"8d15af45-a8f2-4689-a633-82abfcd03bae\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.384467 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmfmg\" (UniqueName: \"kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg\") pod \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.384547 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts\") pod \"8d15af45-a8f2-4689-a633-82abfcd03bae\" (UID: \"8d15af45-a8f2-4689-a633-82abfcd03bae\") " Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.384586 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts\") pod \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\" (UID: \"776102e1-f9bb-46db-bf9e-b3171a8d64fa\") " Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.385627 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "776102e1-f9bb-46db-bf9e-b3171a8d64fa" (UID: "776102e1-f9bb-46db-bf9e-b3171a8d64fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.386639 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8d15af45-a8f2-4689-a633-82abfcd03bae" (UID: "8d15af45-a8f2-4689-a633-82abfcd03bae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.391953 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz" (OuterVolumeSpecName: "kube-api-access-g86zz") pod "8d15af45-a8f2-4689-a633-82abfcd03bae" (UID: "8d15af45-a8f2-4689-a633-82abfcd03bae"). InnerVolumeSpecName "kube-api-access-g86zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.393798 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg" (OuterVolumeSpecName: "kube-api-access-dmfmg") pod "776102e1-f9bb-46db-bf9e-b3171a8d64fa" (UID: "776102e1-f9bb-46db-bf9e-b3171a8d64fa"). InnerVolumeSpecName "kube-api-access-dmfmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.452576 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.464315 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.486749 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g86zz\" (UniqueName: \"kubernetes.io/projected/8d15af45-a8f2-4689-a633-82abfcd03bae-kube-api-access-g86zz\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.486789 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmfmg\" (UniqueName: \"kubernetes.io/projected/776102e1-f9bb-46db-bf9e-b3171a8d64fa-kube-api-access-dmfmg\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.486799 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d15af45-a8f2-4689-a633-82abfcd03bae-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.486808 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/776102e1-f9bb-46db-bf9e-b3171a8d64fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.717546 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-s98g2"] Dec 11 09:41:38 crc kubenswrapper[4788]: W1211 09:41:38.734813 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod638ccdd0_d9e8_4608_9eb9_eb3abebae04d.slice/crio-8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af WatchSource:0}: Error finding container 8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af: Status 404 returned error can't find the container with id 8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.800329 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5ce6-account-create-update-4dlzg"] Dec 11 09:41:38 crc kubenswrapper[4788]: W1211 09:41:38.810917 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a40a6f3_d7b0_4064_9e58_aea4c9a8bd02.slice/crio-513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0 WatchSource:0}: Error finding container 513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0: Status 404 returned error can't find the container with id 513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0 Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.812726 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mlxpj"] Dec 11 09:41:38 crc kubenswrapper[4788]: W1211 09:41:38.869619 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac89feb3_6e9d_4d4e_bfbc_313328583a65.slice/crio-c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb WatchSource:0}: Error finding container c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb: Status 404 returned error can't find the container with id c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 
09:41:38.892596 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-26ad-account-create-update-q7plk"] Dec 11 09:41:38 crc kubenswrapper[4788]: W1211 09:41:38.946082 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbceadd92_112d_46ef_bfa8_4fd844c01ebf.slice/crio-f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6 WatchSource:0}: Error finding container f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6: Status 404 returned error can't find the container with id f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6 Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.953484 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-5hgrc" podUID="b899f552-09d4-4919-a3f1-79ff044cd435" containerName="ovn-controller" probeResult="failure" output=< Dec 11 09:41:38 crc kubenswrapper[4788]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 11 09:41:38 crc kubenswrapper[4788]: > Dec 11 09:41:38 crc kubenswrapper[4788]: I1211 09:41:38.992995 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.009839 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7474g" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.182754 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-9a29-account-create-update-qv8wk" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.182750 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-9a29-account-create-update-qv8wk" event={"ID":"776102e1-f9bb-46db-bf9e-b3171a8d64fa","Type":"ContainerDied","Data":"fbacaacd2e75e653b28ba343a9c997c098ab2c1eb44095b6cbdb2f87436d6e3a"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.183289 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbacaacd2e75e653b28ba343a9c997c098ab2c1eb44095b6cbdb2f87436d6e3a" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.185214 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-s98g2" event={"ID":"638ccdd0-d9e8-4608-9eb9-eb3abebae04d","Type":"ContainerStarted","Data":"300d96478e62c8fae7b5ec769f1ba6cca4a1d1e3686c00d0a3abccd805eb1e86"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.185291 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-s98g2" event={"ID":"638ccdd0-d9e8-4608-9eb9-eb3abebae04d","Type":"ContainerStarted","Data":"8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.192020 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlxpj" event={"ID":"ac89feb3-6e9d-4d4e-bfbc-313328583a65","Type":"ContainerStarted","Data":"8ac0b13f2c9eb948be42f6eed506390e7465eda044ed69b7f08b60752cf99e63"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.192074 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlxpj" event={"ID":"ac89feb3-6e9d-4d4e-bfbc-313328583a65","Type":"ContainerStarted","Data":"c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.196934 4788 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/placement-26ad-account-create-update-q7plk" event={"ID":"bceadd92-112d-46ef-bfa8-4fd844c01ebf","Type":"ContainerStarted","Data":"f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.200799 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jl2kd" event={"ID":"8d15af45-a8f2-4689-a633-82abfcd03bae","Type":"ContainerDied","Data":"7d73f5f0ec7ec4eef0b2fe4af437eb3e1000c49d89f7d29161f4a0c0e661bd11"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.200846 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d73f5f0ec7ec4eef0b2fe4af437eb3e1000c49d89f7d29161f4a0c0e661bd11" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.200927 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jl2kd" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.204880 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ce6-account-create-update-4dlzg" event={"ID":"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02","Type":"ContainerStarted","Data":"da74c77e7b5a3e51c3553089c60f51237fa7228e2ea0e205ca94c39810e79262"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.204945 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ce6-account-create-update-4dlzg" event={"ID":"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02","Type":"ContainerStarted","Data":"513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0"} Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.217276 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-s98g2" podStartSLOduration=2.217255183 podStartE2EDuration="2.217255183s" podCreationTimestamp="2025-12-11 09:41:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:39.208106571 +0000 UTC m=+1229.278886157" watchObservedRunningTime="2025-12-11 09:41:39.217255183 +0000 UTC m=+1229.288034769" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.238369 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5ce6-account-create-update-4dlzg" podStartSLOduration=2.238334298 podStartE2EDuration="2.238334298s" podCreationTimestamp="2025-12-11 09:41:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:39.237827235 +0000 UTC m=+1229.308606821" watchObservedRunningTime="2025-12-11 09:41:39.238334298 +0000 UTC m=+1229.309113884" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.261277 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-5hgrc-config-tmvlr"] Dec 11 09:41:39 crc kubenswrapper[4788]: E1211 09:41:39.261706 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d15af45-a8f2-4689-a633-82abfcd03bae" containerName="mariadb-database-create" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.261726 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d15af45-a8f2-4689-a633-82abfcd03bae" containerName="mariadb-database-create" Dec 11 09:41:39 crc kubenswrapper[4788]: E1211 09:41:39.261747 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="776102e1-f9bb-46db-bf9e-b3171a8d64fa" containerName="mariadb-account-create-update" Dec 11 
09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.261755 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="776102e1-f9bb-46db-bf9e-b3171a8d64fa" containerName="mariadb-account-create-update" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.261918 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d15af45-a8f2-4689-a633-82abfcd03bae" containerName="mariadb-database-create" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.262141 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="776102e1-f9bb-46db-bf9e-b3171a8d64fa" containerName="mariadb-account-create-update" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.262793 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.269744 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.276270 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-mlxpj" podStartSLOduration=2.27622912 podStartE2EDuration="2.27622912s" podCreationTimestamp="2025-12-11 09:41:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:41:39.271694164 +0000 UTC m=+1229.342473760" watchObservedRunningTime="2025-12-11 09:41:39.27622912 +0000 UTC m=+1229.347008706" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.291417 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc-config-tmvlr"] Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.312930 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.312992 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.313024 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.313057 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.313294 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.313383 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7dzn\" (UniqueName: \"kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.415404 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7dzn\" (UniqueName: \"kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.415606 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.415664 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.415704 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.415766 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.416030 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.416038 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.416800 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.419219 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.419532 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.419652 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.439516 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7dzn\" (UniqueName: \"kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn\") pod \"ovn-controller-5hgrc-config-tmvlr\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:39 crc kubenswrapper[4788]: I1211 09:41:39.582653 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.095944 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc-config-tmvlr"] Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.215337 4788 generic.go:334] "Generic (PLEG): container finished" podID="7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" containerID="da74c77e7b5a3e51c3553089c60f51237fa7228e2ea0e205ca94c39810e79262" exitCode=0 Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.215500 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ce6-account-create-update-4dlzg" event={"ID":"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02","Type":"ContainerDied","Data":"da74c77e7b5a3e51c3553089c60f51237fa7228e2ea0e205ca94c39810e79262"} Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.217574 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-tmvlr" event={"ID":"b147f471-fec4-40a8-9d91-d67d261e6d34","Type":"ContainerStarted","Data":"85b61b5406721ae784eeae218d6568a00bf706c39d521c11f56697a8dbaaf21c"} Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.221918 4788 generic.go:334] "Generic (PLEG): container finished" podID="638ccdd0-d9e8-4608-9eb9-eb3abebae04d" containerID="300d96478e62c8fae7b5ec769f1ba6cca4a1d1e3686c00d0a3abccd805eb1e86" exitCode=0 Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.221996 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-s98g2" event={"ID":"638ccdd0-d9e8-4608-9eb9-eb3abebae04d","Type":"ContainerDied","Data":"300d96478e62c8fae7b5ec769f1ba6cca4a1d1e3686c00d0a3abccd805eb1e86"} Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.240695 4788 generic.go:334] "Generic (PLEG): container finished" podID="ac89feb3-6e9d-4d4e-bfbc-313328583a65" containerID="8ac0b13f2c9eb948be42f6eed506390e7465eda044ed69b7f08b60752cf99e63" exitCode=0 Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.240832 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlxpj" event={"ID":"ac89feb3-6e9d-4d4e-bfbc-313328583a65","Type":"ContainerDied","Data":"8ac0b13f2c9eb948be42f6eed506390e7465eda044ed69b7f08b60752cf99e63"} Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.243764 4788 generic.go:334] "Generic (PLEG): container finished" podID="bceadd92-112d-46ef-bfa8-4fd844c01ebf" containerID="e4481afdbb3294155dce49e6a29f6a575c6a692d7b5f5f554c98bb8013149901" exitCode=0 Dec 11 09:41:40 crc kubenswrapper[4788]: I1211 09:41:40.243808 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-26ad-account-create-update-q7plk" event={"ID":"bceadd92-112d-46ef-bfa8-4fd844c01ebf","Type":"ContainerDied","Data":"e4481afdbb3294155dce49e6a29f6a575c6a692d7b5f5f554c98bb8013149901"} Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.251753 4788 generic.go:334] "Generic (PLEG): container finished" podID="b147f471-fec4-40a8-9d91-d67d261e6d34" containerID="86b19325e48b31462b96a8958c158830421c81882b80a1a572c29d6fff67e72f" exitCode=0 Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.251794 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-tmvlr" event={"ID":"b147f471-fec4-40a8-9d91-d67d261e6d34","Type":"ContainerDied","Data":"86b19325e48b31462b96a8958c158830421c81882b80a1a572c29d6fff67e72f"} Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.676025 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.780065 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts\") pod \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.780167 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5vzx\" (UniqueName: \"kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx\") pod \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\" (UID: \"638ccdd0-d9e8-4608-9eb9-eb3abebae04d\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.781150 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "638ccdd0-d9e8-4608-9eb9-eb3abebae04d" (UID: "638ccdd0-d9e8-4608-9eb9-eb3abebae04d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.791802 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx" (OuterVolumeSpecName: "kube-api-access-z5vzx") pod "638ccdd0-d9e8-4608-9eb9-eb3abebae04d" (UID: "638ccdd0-d9e8-4608-9eb9-eb3abebae04d"). InnerVolumeSpecName "kube-api-access-z5vzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.860976 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.871751 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.883801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7lht\" (UniqueName: \"kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht\") pod \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.883866 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts\") pod \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.883951 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts\") pod \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\" (UID: \"ac89feb3-6e9d-4d4e-bfbc-313328583a65\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.884416 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.884442 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5vzx\" (UniqueName: \"kubernetes.io/projected/638ccdd0-d9e8-4608-9eb9-eb3abebae04d-kube-api-access-z5vzx\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.884444 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bceadd92-112d-46ef-bfa8-4fd844c01ebf" (UID: "bceadd92-112d-46ef-bfa8-4fd844c01ebf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.884781 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ac89feb3-6e9d-4d4e-bfbc-313328583a65" (UID: "ac89feb3-6e9d-4d4e-bfbc-313328583a65"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.886578 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.891531 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht" (OuterVolumeSpecName: "kube-api-access-k7lht") pod "ac89feb3-6e9d-4d4e-bfbc-313328583a65" (UID: "ac89feb3-6e9d-4d4e-bfbc-313328583a65"). InnerVolumeSpecName "kube-api-access-k7lht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.985628 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcrnl\" (UniqueName: \"kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl\") pod \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\" (UID: \"bceadd92-112d-46ef-bfa8-4fd844c01ebf\") " Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.985992 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac89feb3-6e9d-4d4e-bfbc-313328583a65-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.986016 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7lht\" (UniqueName: \"kubernetes.io/projected/ac89feb3-6e9d-4d4e-bfbc-313328583a65-kube-api-access-k7lht\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.986030 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bceadd92-112d-46ef-bfa8-4fd844c01ebf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:41 crc kubenswrapper[4788]: I1211 09:41:41.988635 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl" (OuterVolumeSpecName: "kube-api-access-mcrnl") pod "bceadd92-112d-46ef-bfa8-4fd844c01ebf" (UID: "bceadd92-112d-46ef-bfa8-4fd844c01ebf"). InnerVolumeSpecName "kube-api-access-mcrnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.087635 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts\") pod \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.087707 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb8w7\" (UniqueName: \"kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7\") pod \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\" (UID: \"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.088053 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" (UID: "7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.088340 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.088358 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcrnl\" (UniqueName: \"kubernetes.io/projected/bceadd92-112d-46ef-bfa8-4fd844c01ebf-kube-api-access-mcrnl\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.091780 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7" (OuterVolumeSpecName: "kube-api-access-lb8w7") pod "7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" (UID: "7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02"). InnerVolumeSpecName "kube-api-access-lb8w7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.189738 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb8w7\" (UniqueName: \"kubernetes.io/projected/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02-kube-api-access-lb8w7\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.263268 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5ce6-account-create-update-4dlzg" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.263264 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5ce6-account-create-update-4dlzg" event={"ID":"7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02","Type":"ContainerDied","Data":"513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0"} Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.264600 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="513001e756fe18a74b1278717893a261c4ede1b3bb60b462b823c2cba58034b0" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.264885 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-s98g2" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.264891 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-s98g2" event={"ID":"638ccdd0-d9e8-4608-9eb9-eb3abebae04d","Type":"ContainerDied","Data":"8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af"} Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.265475 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a6b00b9e68e41b6e8ec67ae1a87dad5b37c6a9b34cf5c7c874f557df9fb55af" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.267333 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mlxpj" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.267373 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlxpj" event={"ID":"ac89feb3-6e9d-4d4e-bfbc-313328583a65","Type":"ContainerDied","Data":"c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb"} Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.267472 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c12f5b86186888cefbb0e41a2110732f23511a51b4333b6b6c7876decdc0cdfb" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.269083 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-26ad-account-create-update-q7plk" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.272422 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-26ad-account-create-update-q7plk" event={"ID":"bceadd92-112d-46ef-bfa8-4fd844c01ebf","Type":"ContainerDied","Data":"f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6"} Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.272481 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f88528c28168576c711e64f5107a5e6cf9eaf514216284c992ebf0febabe9ed6" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.502135 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.699937 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7dzn\" (UniqueName: \"kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700016 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700046 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700083 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700112 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700176 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod 
"b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700214 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts\") pod \"b147f471-fec4-40a8-9d91-d67d261e6d34\" (UID: \"b147f471-fec4-40a8-9d91-d67d261e6d34\") " Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700171 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700247 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run" (OuterVolumeSpecName: "var-run") pod "b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700913 4788 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700940 4788 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700952 4788 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b147f471-fec4-40a8-9d91-d67d261e6d34-var-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.700988 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.701269 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts" (OuterVolumeSpecName: "scripts") pod "b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.718489 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn" (OuterVolumeSpecName: "kube-api-access-m7dzn") pod "b147f471-fec4-40a8-9d91-d67d261e6d34" (UID: "b147f471-fec4-40a8-9d91-d67d261e6d34"). InnerVolumeSpecName "kube-api-access-m7dzn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.803078 4788 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.803116 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7dzn\" (UniqueName: \"kubernetes.io/projected/b147f471-fec4-40a8-9d91-d67d261e6d34-kube-api-access-m7dzn\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.803134 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b147f471-fec4-40a8-9d91-d67d261e6d34-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:42 crc kubenswrapper[4788]: I1211 09:41:42.904685 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:41:42 crc kubenswrapper[4788]: E1211 09:41:42.904916 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:41:42 crc kubenswrapper[4788]: E1211 09:41:42.904954 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:41:42 crc kubenswrapper[4788]: E1211 09:41:42.905028 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:42:14.90500892 +0000 UTC m=+1264.975788506 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.279073 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-tmvlr" event={"ID":"b147f471-fec4-40a8-9d91-d67d261e6d34","Type":"ContainerDied","Data":"85b61b5406721ae784eeae218d6568a00bf706c39d521c11f56697a8dbaaf21c"} Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.279113 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85b61b5406721ae784eeae218d6568a00bf706c39d521c11f56697a8dbaaf21c" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.279141 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-tmvlr" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.668481 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-5hgrc-config-tmvlr"] Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.677627 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-5hgrc-config-tmvlr"] Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730410 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-5hgrc-config-jsgc9"] Dec 11 09:41:43 crc kubenswrapper[4788]: E1211 09:41:43.730863 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730884 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: E1211 09:41:43.730896 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b147f471-fec4-40a8-9d91-d67d261e6d34" containerName="ovn-config" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730907 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b147f471-fec4-40a8-9d91-d67d261e6d34" containerName="ovn-config" Dec 11 09:41:43 crc kubenswrapper[4788]: E1211 09:41:43.730931 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac89feb3-6e9d-4d4e-bfbc-313328583a65" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730938 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac89feb3-6e9d-4d4e-bfbc-313328583a65" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: E1211 09:41:43.730961 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bceadd92-112d-46ef-bfa8-4fd844c01ebf" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730967 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="bceadd92-112d-46ef-bfa8-4fd844c01ebf" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: E1211 09:41:43.730977 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="638ccdd0-d9e8-4608-9eb9-eb3abebae04d" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.730983 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="638ccdd0-d9e8-4608-9eb9-eb3abebae04d" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731174 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b147f471-fec4-40a8-9d91-d67d261e6d34" containerName="ovn-config" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731195 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="638ccdd0-d9e8-4608-9eb9-eb3abebae04d" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731208 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731218 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac89feb3-6e9d-4d4e-bfbc-313328583a65" containerName="mariadb-database-create" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731244 4788 
memory_manager.go:354] "RemoveStaleState removing state" podUID="bceadd92-112d-46ef-bfa8-4fd844c01ebf" containerName="mariadb-account-create-update" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.731915 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.735672 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.757481 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc-config-jsgc9"] Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.922220 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.922676 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhr2l\" (UniqueName: \"kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.922772 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.922864 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.922887 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.923092 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:43 crc kubenswrapper[4788]: I1211 09:41:43.950839 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-5hgrc" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024219 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024350 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024370 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024444 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024477 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024501 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhr2l\" (UniqueName: \"kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024652 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024916 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.024974 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.025571 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.027019 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.070277 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhr2l\" (UniqueName: \"kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l\") pod \"ovn-controller-5hgrc-config-jsgc9\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.102690 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-8k66n"] Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.104001 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.108223 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.112152 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-49zhk" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.127429 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8k66n"] Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.227822 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2vdj\" (UniqueName: \"kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.227905 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.227971 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.228006 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.329775 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-r2vdj\" (UniqueName: \"kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.329847 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.329875 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.329900 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.335818 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.335908 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.337666 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.350776 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2vdj\" (UniqueName: \"kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj\") pod \"glance-db-sync-8k66n\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.368907 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.424133 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8k66n" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.507282 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b147f471-fec4-40a8-9d91-d67d261e6d34" path="/var/lib/kubelet/pods/b147f471-fec4-40a8-9d91-d67d261e6d34/volumes" Dec 11 09:41:44 crc kubenswrapper[4788]: I1211 09:41:44.870041 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-5hgrc-config-jsgc9"] Dec 11 09:41:44 crc kubenswrapper[4788]: W1211 09:41:44.872278 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod185f652d_12ed_4aea_8ee9_91dee8c3112e.slice/crio-70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93 WatchSource:0}: Error finding container 70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93: Status 404 returned error can't find the container with id 70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93 Dec 11 09:41:45 crc kubenswrapper[4788]: I1211 09:41:45.076508 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8k66n"] Dec 11 09:41:45 crc kubenswrapper[4788]: I1211 09:41:45.301147 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-jsgc9" event={"ID":"185f652d-12ed-4aea-8ee9-91dee8c3112e","Type":"ContainerStarted","Data":"70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93"} Dec 11 09:41:45 crc kubenswrapper[4788]: I1211 09:41:45.302796 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8k66n" event={"ID":"ed639405-5b9d-491f-b673-3d0657a14ecf","Type":"ContainerStarted","Data":"2ab9347f556f4c02219d258549c26c0e09b6eae66bea2f818c078f93ccc9baf6"} Dec 11 09:41:47 crc kubenswrapper[4788]: I1211 09:41:47.322081 4788 generic.go:334] "Generic (PLEG): container finished" podID="185f652d-12ed-4aea-8ee9-91dee8c3112e" containerID="87b5bc57894128ff451222c65a4ef22a3dda5c56f40835cc288b55267938f418" exitCode=0 Dec 11 09:41:47 crc kubenswrapper[4788]: I1211 09:41:47.322562 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-jsgc9" event={"ID":"185f652d-12ed-4aea-8ee9-91dee8c3112e","Type":"ContainerDied","Data":"87b5bc57894128ff451222c65a4ef22a3dda5c56f40835cc288b55267938f418"} Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.663854 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.837566 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.841829 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhr2l\" (UniqueName: \"kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.841931 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.841981 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.842014 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.842058 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.842119 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run\") pod \"185f652d-12ed-4aea-8ee9-91dee8c3112e\" (UID: \"185f652d-12ed-4aea-8ee9-91dee8c3112e\") " Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.842317 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run" (OuterVolumeSpecName: "var-run") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.844113 4788 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.844198 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.844486 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.844970 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.846774 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts" (OuterVolumeSpecName: "scripts") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.850026 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l" (OuterVolumeSpecName: "kube-api-access-qhr2l") pod "185f652d-12ed-4aea-8ee9-91dee8c3112e" (UID: "185f652d-12ed-4aea-8ee9-91dee8c3112e"). InnerVolumeSpecName "kube-api-access-qhr2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.854345 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.945204 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhr2l\" (UniqueName: \"kubernetes.io/projected/185f652d-12ed-4aea-8ee9-91dee8c3112e-kube-api-access-qhr2l\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.945249 4788 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.945260 4788 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.945270 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/185f652d-12ed-4aea-8ee9-91dee8c3112e-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:48 crc kubenswrapper[4788]: I1211 09:41:48.945278 4788 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/185f652d-12ed-4aea-8ee9-91dee8c3112e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 09:41:49 crc kubenswrapper[4788]: I1211 09:41:49.341505 4788 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-5hgrc-config-jsgc9" event={"ID":"185f652d-12ed-4aea-8ee9-91dee8c3112e","Type":"ContainerDied","Data":"70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93"} Dec 11 09:41:49 crc kubenswrapper[4788]: I1211 09:41:49.341871 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70bdeb38e647a45ed5d2e22d6244f14b296f6a1a297e4840ace337133b17de93" Dec 11 09:41:49 crc kubenswrapper[4788]: I1211 09:41:49.341939 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-5hgrc-config-jsgc9" Dec 11 09:41:49 crc kubenswrapper[4788]: I1211 09:41:49.750604 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-5hgrc-config-jsgc9"] Dec 11 09:41:49 crc kubenswrapper[4788]: I1211 09:41:49.764669 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-5hgrc-config-jsgc9"] Dec 11 09:41:50 crc kubenswrapper[4788]: I1211 09:41:50.524837 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="185f652d-12ed-4aea-8ee9-91dee8c3112e" path="/var/lib/kubelet/pods/185f652d-12ed-4aea-8ee9-91dee8c3112e/volumes" Dec 11 09:41:52 crc kubenswrapper[4788]: I1211 09:41:52.368162 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m5bdb" event={"ID":"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1","Type":"ContainerStarted","Data":"810eb962bd5fb8cea653e73316b108a5591ba079ec2e77ba1148c22e75094ef0"} Dec 11 09:41:52 crc kubenswrapper[4788]: I1211 09:41:52.389966 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-m5bdb" podStartSLOduration=2.989133044 podStartE2EDuration="41.389944192s" podCreationTimestamp="2025-12-11 09:41:11 +0000 UTC" firstStartedPulling="2025-12-11 09:41:13.244623942 +0000 UTC m=+1203.315403528" lastFinishedPulling="2025-12-11 09:41:51.6454351 +0000 UTC m=+1241.716214676" observedRunningTime="2025-12-11 09:41:52.3894698 +0000 UTC m=+1242.460249386" watchObservedRunningTime="2025-12-11 09:41:52.389944192 +0000 UTC m=+1242.460723788" Dec 11 09:41:58 crc kubenswrapper[4788]: I1211 09:41:58.839593 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 11 09:41:58 crc kubenswrapper[4788]: I1211 09:41:58.867023 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.207541 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-z48xt"] Dec 11 09:41:59 crc kubenswrapper[4788]: E1211 09:41:59.208068 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="185f652d-12ed-4aea-8ee9-91dee8c3112e" containerName="ovn-config" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.208086 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="185f652d-12ed-4aea-8ee9-91dee8c3112e" containerName="ovn-config" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.208313 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="185f652d-12ed-4aea-8ee9-91dee8c3112e" containerName="ovn-config" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.209029 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.222842 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-z48xt"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.343192 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-hfflh"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.344571 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.360509 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-75ef-account-create-update-k799k"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.364053 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.369514 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.448142 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.456946 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9clj\" (UniqueName: \"kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.549460 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-hfflh"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.559622 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54xft\" (UniqueName: \"kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.581813 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.580708 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.583531 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6wgm\" (UniqueName: \"kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm\") pod 
\"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.583632 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9clj\" (UniqueName: \"kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.583794 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.583906 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts\") pod \"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.651141 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9clj\" (UniqueName: \"kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj\") pod \"cinder-db-create-z48xt\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.669814 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-75ef-account-create-update-k799k"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.690760 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6wgm\" (UniqueName: \"kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm\") pod \"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.690845 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.690876 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts\") pod \"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.690916 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54xft\" (UniqueName: \"kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " 
pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.692422 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.692963 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts\") pod \"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.703485 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-297b-account-create-update-4fsxw"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.704995 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.708539 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-297b-account-create-update-4fsxw"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.712174 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.722797 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6wgm\" (UniqueName: \"kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm\") pod \"cinder-75ef-account-create-update-k799k\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.722865 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54xft\" (UniqueName: \"kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft\") pod \"barbican-db-create-hfflh\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.792508 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.792843 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvrkh\" (UniqueName: \"kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.807627 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-5xddv"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.809104 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-5xddv" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.815409 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.815644 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gthls" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.815779 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.816182 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.828696 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-5xddv"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.832752 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-z48xt" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.843637 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-v4wgz"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.848726 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-v4wgz" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.854969 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-v4wgz"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.888053 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5988-account-create-update-lfdwj"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.889200 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5988-account-create-update-lfdwj"] Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.889293 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.920534 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvrkh\" (UniqueName: \"kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.920611 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.920680 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn2cs\" (UniqueName: \"kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.920759 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.920832 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.921565 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.921843 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.939731 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvrkh\" (UniqueName: \"kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh\") pod \"barbican-297b-account-create-update-4fsxw\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.972491 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-hfflh" Dec 11 09:41:59 crc kubenswrapper[4788]: I1211 09:41:59.986656 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.022909 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023001 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023043 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2q7d\" (UniqueName: \"kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023080 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26jss\" (UniqueName: \"kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023172 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn2cs\" (UniqueName: \"kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023265 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.023298 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.027749 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.028610 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle\") pod 
\"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.043481 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn2cs\" (UniqueName: \"kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs\") pod \"keystone-db-sync-5xddv\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.124757 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.125344 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.125643 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.125743 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2q7d\" (UniqueName: \"kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.125796 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26jss\" (UniqueName: \"kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.126415 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.126664 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.143818 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26jss\" (UniqueName: \"kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss\") pod \"neutron-db-create-v4wgz\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.147714 4788 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2q7d\" (UniqueName: \"kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d\") pod \"neutron-5988-account-create-update-lfdwj\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.149457 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.227956 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:00 crc kubenswrapper[4788]: I1211 09:42:00.287701 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:14 crc kubenswrapper[4788]: I1211 09:42:14.986910 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:42:14 crc kubenswrapper[4788]: E1211 09:42:14.987143 4788 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 11 09:42:14 crc kubenswrapper[4788]: E1211 09:42:14.987580 4788 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 11 09:42:14 crc kubenswrapper[4788]: E1211 09:42:14.987744 4788 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift podName:874f552a-7856-439c-937c-a87d9c15305c nodeName:}" failed. No retries permitted until 2025-12-11 09:43:18.987706956 +0000 UTC m=+1329.058486542 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift") pod "swift-storage-0" (UID: "874f552a-7856-439c-937c-a87d9c15305c") : configmap "swift-ring-files" not found Dec 11 09:42:15 crc kubenswrapper[4788]: E1211 09:42:15.484154 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 11 09:42:15 crc kubenswrapper[4788]: E1211 09:42:15.484564 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r2vdj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-8k66n_openstack(ed639405-5b9d-491f-b673-3d0657a14ecf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:42:15 crc kubenswrapper[4788]: E1211 09:42:15.485859 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-8k66n" podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" Dec 11 09:42:15 crc kubenswrapper[4788]: E1211 09:42:15.677138 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-8k66n" 
podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" Dec 11 09:42:15 crc kubenswrapper[4788]: I1211 09:42:15.954061 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-z48xt"] Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.432433 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-5xddv"] Dec 11 09:42:16 crc kubenswrapper[4788]: W1211 09:42:16.439761 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c51946a_22cd_4576_8c5d_a37ec2746c2e.slice/crio-9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b WatchSource:0}: Error finding container 9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b: Status 404 returned error can't find the container with id 9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.444259 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 11 09:42:16 crc kubenswrapper[4788]: W1211 09:42:16.444764 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d51ecca_226a_42a2_ad6c_249628ee18b7.slice/crio-342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f WatchSource:0}: Error finding container 342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f: Status 404 returned error can't find the container with id 342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.447330 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-297b-account-create-update-4fsxw"] Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.451573 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.454561 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.457440 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-75ef-account-create-update-k799k"] Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.465466 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-v4wgz"] Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.469986 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5988-account-create-update-lfdwj"] Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.599046 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-hfflh"] Dec 11 09:42:16 crc kubenswrapper[4788]: W1211 09:42:16.607679 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded92d625_0e3b_43c8_9a8b_c228c701bcba.slice/crio-81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b WatchSource:0}: Error finding container 81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b: Status 404 returned error can't find the container with id 81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.669335 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-v4wgz" 
event={"ID":"0c51946a-22cd-4576-8c5d-a37ec2746c2e","Type":"ContainerStarted","Data":"9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.670655 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5988-account-create-update-lfdwj" event={"ID":"827fe8ce-f75e-4f71-b2d6-5d791513c568","Type":"ContainerStarted","Data":"0ad3740b846bd83790a7d096190d12fa7a4630d88eae82be69f2c1d257c187ad"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.671855 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hfflh" event={"ID":"ed92d625-0e3b-43c8-9a8b-c228c701bcba","Type":"ContainerStarted","Data":"81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.673401 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-297b-account-create-update-4fsxw" event={"ID":"7d73232b-acdd-4586-a02d-e2f22ce3e0fa","Type":"ContainerStarted","Data":"f3c5f406b462485d5395c07b0f3ce56fe11f53e8a77020c75e2db4de8559e531"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.674832 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-75ef-account-create-update-k799k" event={"ID":"3d51ecca-226a-42a2-ad6c-249628ee18b7","Type":"ContainerStarted","Data":"342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.687247 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-5xddv" event={"ID":"97b7b5ca-b073-49a3-9dc5-adce0490e178","Type":"ContainerStarted","Data":"e58d66a26ada0c14165fbb5a68ed9c72224fa7238000ddcd5aaa0e67eab63dfb"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.689410 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-z48xt" event={"ID":"1c99537b-1ec9-4cf3-8fc8-12466dcca552","Type":"ContainerStarted","Data":"d5786b3fa9a6548c1a2aa04befb2e46173d79c9b07e494e81d325c104e7d172a"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.689444 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-z48xt" event={"ID":"1c99537b-1ec9-4cf3-8fc8-12466dcca552","Type":"ContainerStarted","Data":"705df48dd394bd443fb81af5a7a1a3ba6da9f2746b4ea4f1272c04f51d206223"} Dec 11 09:42:16 crc kubenswrapper[4788]: I1211 09:42:16.707712 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-z48xt" podStartSLOduration=17.707689785 podStartE2EDuration="17.707689785s" podCreationTimestamp="2025-12-11 09:41:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:42:16.704434012 +0000 UTC m=+1266.775213618" watchObservedRunningTime="2025-12-11 09:42:16.707689785 +0000 UTC m=+1266.778469381" Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.698002 4788 generic.go:334] "Generic (PLEG): container finished" podID="1c99537b-1ec9-4cf3-8fc8-12466dcca552" containerID="d5786b3fa9a6548c1a2aa04befb2e46173d79c9b07e494e81d325c104e7d172a" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.698063 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-z48xt" event={"ID":"1c99537b-1ec9-4cf3-8fc8-12466dcca552","Type":"ContainerDied","Data":"d5786b3fa9a6548c1a2aa04befb2e46173d79c9b07e494e81d325c104e7d172a"} Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 
09:42:17.700784 4788 generic.go:334] "Generic (PLEG): container finished" podID="0c51946a-22cd-4576-8c5d-a37ec2746c2e" containerID="b7b8821bb730ec9501afae94f6a6cec12b6ed8470388fa4974538c738a70194d" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.700891 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-v4wgz" event={"ID":"0c51946a-22cd-4576-8c5d-a37ec2746c2e","Type":"ContainerDied","Data":"b7b8821bb730ec9501afae94f6a6cec12b6ed8470388fa4974538c738a70194d"} Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.702801 4788 generic.go:334] "Generic (PLEG): container finished" podID="827fe8ce-f75e-4f71-b2d6-5d791513c568" containerID="2d136c1b53ae68cb4ce06cf91963a9afda464820ec3bd40ae0502caee8ea0fc4" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.702837 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5988-account-create-update-lfdwj" event={"ID":"827fe8ce-f75e-4f71-b2d6-5d791513c568","Type":"ContainerDied","Data":"2d136c1b53ae68cb4ce06cf91963a9afda464820ec3bd40ae0502caee8ea0fc4"} Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.704592 4788 generic.go:334] "Generic (PLEG): container finished" podID="ed92d625-0e3b-43c8-9a8b-c228c701bcba" containerID="b198e42c1dd45ce312a2f26ca1e387b7f2620b5d4497a25c9c2fef3d916bde4d" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.704630 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hfflh" event={"ID":"ed92d625-0e3b-43c8-9a8b-c228c701bcba","Type":"ContainerDied","Data":"b198e42c1dd45ce312a2f26ca1e387b7f2620b5d4497a25c9c2fef3d916bde4d"} Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.706485 4788 generic.go:334] "Generic (PLEG): container finished" podID="7d73232b-acdd-4586-a02d-e2f22ce3e0fa" containerID="72fc5943315c2041e20bcf5ae019cd3bd4440e10eeabb9eb4303e55d716145d6" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.706563 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-297b-account-create-update-4fsxw" event={"ID":"7d73232b-acdd-4586-a02d-e2f22ce3e0fa","Type":"ContainerDied","Data":"72fc5943315c2041e20bcf5ae019cd3bd4440e10eeabb9eb4303e55d716145d6"} Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.710381 4788 generic.go:334] "Generic (PLEG): container finished" podID="3d51ecca-226a-42a2-ad6c-249628ee18b7" containerID="8fa6913348e837373aee881b298c02ed31980a4df254562f431cabb1a4f7309d" exitCode=0 Dec 11 09:42:17 crc kubenswrapper[4788]: I1211 09:42:17.710444 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-75ef-account-create-update-k799k" event={"ID":"3d51ecca-226a-42a2-ad6c-249628ee18b7","Type":"ContainerDied","Data":"8fa6913348e837373aee881b298c02ed31980a4df254562f431cabb1a4f7309d"} Dec 11 09:42:18 crc kubenswrapper[4788]: I1211 09:42:18.725295 4788 generic.go:334] "Generic (PLEG): container finished" podID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" containerID="810eb962bd5fb8cea653e73316b108a5591ba079ec2e77ba1148c22e75094ef0" exitCode=0 Dec 11 09:42:18 crc kubenswrapper[4788]: I1211 09:42:18.725545 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m5bdb" event={"ID":"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1","Type":"ContainerDied","Data":"810eb962bd5fb8cea653e73316b108a5591ba079ec2e77ba1148c22e75094ef0"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.655444 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-hfflh" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.666285 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-z48xt" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.705811 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.734480 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54xft\" (UniqueName: \"kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft\") pod \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735089 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735512 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts\") pod \"3d51ecca-226a-42a2-ad6c-249628ee18b7\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735549 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6wgm\" (UniqueName: \"kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm\") pod \"3d51ecca-226a-42a2-ad6c-249628ee18b7\" (UID: \"3d51ecca-226a-42a2-ad6c-249628ee18b7\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735567 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts\") pod \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\" (UID: \"ed92d625-0e3b-43c8-9a8b-c228c701bcba\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735614 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts\") pod \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.735643 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9clj\" (UniqueName: \"kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj\") pod \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\" (UID: \"1c99537b-1ec9-4cf3-8fc8-12466dcca552\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.736111 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d51ecca-226a-42a2-ad6c-249628ee18b7" (UID: "3d51ecca-226a-42a2-ad6c-249628ee18b7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.736179 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed92d625-0e3b-43c8-9a8b-c228c701bcba" (UID: "ed92d625-0e3b-43c8-9a8b-c228c701bcba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.736685 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c99537b-1ec9-4cf3-8fc8-12466dcca552" (UID: "1c99537b-1ec9-4cf3-8fc8-12466dcca552"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.737939 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.739825 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d51ecca-226a-42a2-ad6c-249628ee18b7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.739866 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed92d625-0e3b-43c8-9a8b-c228c701bcba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.739875 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c99537b-1ec9-4cf3-8fc8-12466dcca552-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.741842 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft" (OuterVolumeSpecName: "kube-api-access-54xft") pod "ed92d625-0e3b-43c8-9a8b-c228c701bcba" (UID: "ed92d625-0e3b-43c8-9a8b-c228c701bcba"). InnerVolumeSpecName "kube-api-access-54xft". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.742526 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm" (OuterVolumeSpecName: "kube-api-access-j6wgm") pod "3d51ecca-226a-42a2-ad6c-249628ee18b7" (UID: "3d51ecca-226a-42a2-ad6c-249628ee18b7"). InnerVolumeSpecName "kube-api-access-j6wgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.742858 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj" (OuterVolumeSpecName: "kube-api-access-s9clj") pod "1c99537b-1ec9-4cf3-8fc8-12466dcca552" (UID: "1c99537b-1ec9-4cf3-8fc8-12466dcca552"). InnerVolumeSpecName "kube-api-access-s9clj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.753104 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.755216 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-v4wgz" event={"ID":"0c51946a-22cd-4576-8c5d-a37ec2746c2e","Type":"ContainerDied","Data":"9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.755297 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e15c560cc56f8526b6d876d754432081f26f7d82785b1d9a30424c06345be0b" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.759418 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.760898 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5988-account-create-update-lfdwj" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.760999 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5988-account-create-update-lfdwj" event={"ID":"827fe8ce-f75e-4f71-b2d6-5d791513c568","Type":"ContainerDied","Data":"0ad3740b846bd83790a7d096190d12fa7a4630d88eae82be69f2c1d257c187ad"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.761090 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ad3740b846bd83790a7d096190d12fa7a4630d88eae82be69f2c1d257c187ad" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.768875 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-hfflh" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.769375 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hfflh" event={"ID":"ed92d625-0e3b-43c8-9a8b-c228c701bcba","Type":"ContainerDied","Data":"81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.769486 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81b0d69309f35bf02821904fb8233187655856118a83bd4443b06c217e56270b" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.781075 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-m5bdb" event={"ID":"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1","Type":"ContainerDied","Data":"64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.781129 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64f24a7fed54e06befa09624bcd6efd0bbf1d33cf6dc873615f2bc8c01e228e7" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.781165 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-m5bdb" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.783636 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-297b-account-create-update-4fsxw" event={"ID":"7d73232b-acdd-4586-a02d-e2f22ce3e0fa","Type":"ContainerDied","Data":"f3c5f406b462485d5395c07b0f3ce56fe11f53e8a77020c75e2db4de8559e531"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.783760 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3c5f406b462485d5395c07b0f3ce56fe11f53e8a77020c75e2db4de8559e531" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.783721 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-297b-account-create-update-4fsxw" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.785335 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-75ef-account-create-update-k799k" event={"ID":"3d51ecca-226a-42a2-ad6c-249628ee18b7","Type":"ContainerDied","Data":"342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.785365 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="342b9ccfaa5fbfb19f6fb467c62cebb787475b7f6073a72d093af30d1b29e14f" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.785507 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-75ef-account-create-update-k799k" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.786975 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-z48xt" event={"ID":"1c99537b-1ec9-4cf3-8fc8-12466dcca552","Type":"ContainerDied","Data":"705df48dd394bd443fb81af5a7a1a3ba6da9f2746b4ea4f1272c04f51d206223"} Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.787022 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="705df48dd394bd443fb81af5a7a1a3ba6da9f2746b4ea4f1272c04f51d206223" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.787056 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-z48xt" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842278 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js7s8\" (UniqueName: \"kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842345 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842382 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842447 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842488 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26jss\" (UniqueName: \"kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss\") pod \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842537 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.842566 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts\") pod \"827fe8ce-f75e-4f71-b2d6-5d791513c568\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843099 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "827fe8ce-f75e-4f71-b2d6-5d791513c568" (UID: "827fe8ce-f75e-4f71-b2d6-5d791513c568"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843245 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2q7d\" (UniqueName: \"kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d\") pod \"827fe8ce-f75e-4f71-b2d6-5d791513c568\" (UID: \"827fe8ce-f75e-4f71-b2d6-5d791513c568\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843309 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts\") pod \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843379 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843421 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843554 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts\") pod \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\" (UID: \"6f5299be-0ac1-4048-b2aa-6a07ce5c30e1\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843632 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvrkh\" (UniqueName: \"kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh\") pod \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\" (UID: \"7d73232b-acdd-4586-a02d-e2f22ce3e0fa\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.843686 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts\") pod \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\" (UID: \"0c51946a-22cd-4576-8c5d-a37ec2746c2e\") " Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844507 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54xft\" (UniqueName: \"kubernetes.io/projected/ed92d625-0e3b-43c8-9a8b-c228c701bcba-kube-api-access-54xft\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844517 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7d73232b-acdd-4586-a02d-e2f22ce3e0fa" (UID: "7d73232b-acdd-4586-a02d-e2f22ce3e0fa"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844533 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/827fe8ce-f75e-4f71-b2d6-5d791513c568-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844626 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6wgm\" (UniqueName: \"kubernetes.io/projected/3d51ecca-226a-42a2-ad6c-249628ee18b7-kube-api-access-j6wgm\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844639 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9clj\" (UniqueName: \"kubernetes.io/projected/1c99537b-1ec9-4cf3-8fc8-12466dcca552-kube-api-access-s9clj\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844652 4788 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.844853 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0c51946a-22cd-4576-8c5d-a37ec2746c2e" (UID: "0c51946a-22cd-4576-8c5d-a37ec2746c2e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.845212 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.848918 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8" (OuterVolumeSpecName: "kube-api-access-js7s8") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "kube-api-access-js7s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.849014 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss" (OuterVolumeSpecName: "kube-api-access-26jss") pod "0c51946a-22cd-4576-8c5d-a37ec2746c2e" (UID: "0c51946a-22cd-4576-8c5d-a37ec2746c2e"). InnerVolumeSpecName "kube-api-access-26jss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.849452 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh" (OuterVolumeSpecName: "kube-api-access-jvrkh") pod "7d73232b-acdd-4586-a02d-e2f22ce3e0fa" (UID: "7d73232b-acdd-4586-a02d-e2f22ce3e0fa"). InnerVolumeSpecName "kube-api-access-jvrkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.850549 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d" (OuterVolumeSpecName: "kube-api-access-q2q7d") pod "827fe8ce-f75e-4f71-b2d6-5d791513c568" (UID: "827fe8ce-f75e-4f71-b2d6-5d791513c568"). InnerVolumeSpecName "kube-api-access-q2q7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.851812 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.867455 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.871951 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts" (OuterVolumeSpecName: "scripts") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.872463 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" (UID: "6f5299be-0ac1-4048-b2aa-6a07ce5c30e1"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946453 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946858 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26jss\" (UniqueName: \"kubernetes.io/projected/0c51946a-22cd-4576-8c5d-a37ec2746c2e-kube-api-access-26jss\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946876 4788 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946887 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2q7d\" (UniqueName: \"kubernetes.io/projected/827fe8ce-f75e-4f71-b2d6-5d791513c568-kube-api-access-q2q7d\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946898 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946911 4788 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946922 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946932 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvrkh\" (UniqueName: \"kubernetes.io/projected/7d73232b-acdd-4586-a02d-e2f22ce3e0fa-kube-api-access-jvrkh\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946946 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c51946a-22cd-4576-8c5d-a37ec2746c2e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946957 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js7s8\" (UniqueName: \"kubernetes.io/projected/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-kube-api-access-js7s8\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:21 crc kubenswrapper[4788]: I1211 09:42:21.946968 4788 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6f5299be-0ac1-4048-b2aa-6a07ce5c30e1-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:22 crc kubenswrapper[4788]: I1211 09:42:22.806685 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-v4wgz" Dec 11 09:42:22 crc kubenswrapper[4788]: I1211 09:42:22.808551 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-5xddv" event={"ID":"97b7b5ca-b073-49a3-9dc5-adce0490e178","Type":"ContainerStarted","Data":"d43cb1543e46a6eb2467ef962404dcf11cd186351ebd33a7e0fe1260bd58c483"} Dec 11 09:42:22 crc kubenswrapper[4788]: I1211 09:42:22.861396 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-5xddv" podStartSLOduration=18.803396949 podStartE2EDuration="23.86137562s" podCreationTimestamp="2025-12-11 09:41:59 +0000 UTC" firstStartedPulling="2025-12-11 09:42:16.43221954 +0000 UTC m=+1266.502999126" lastFinishedPulling="2025-12-11 09:42:21.490198211 +0000 UTC m=+1271.560977797" observedRunningTime="2025-12-11 09:42:22.840705781 +0000 UTC m=+1272.911485367" watchObservedRunningTime="2025-12-11 09:42:22.86137562 +0000 UTC m=+1272.932155206" Dec 11 09:42:26 crc kubenswrapper[4788]: I1211 09:42:26.850124 4788 generic.go:334] "Generic (PLEG): container finished" podID="97b7b5ca-b073-49a3-9dc5-adce0490e178" containerID="d43cb1543e46a6eb2467ef962404dcf11cd186351ebd33a7e0fe1260bd58c483" exitCode=0 Dec 11 09:42:26 crc kubenswrapper[4788]: I1211 09:42:26.850361 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-5xddv" event={"ID":"97b7b5ca-b073-49a3-9dc5-adce0490e178","Type":"ContainerDied","Data":"d43cb1543e46a6eb2467ef962404dcf11cd186351ebd33a7e0fe1260bd58c483"} Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.429475 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.571527 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle\") pod \"97b7b5ca-b073-49a3-9dc5-adce0490e178\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.571591 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data\") pod \"97b7b5ca-b073-49a3-9dc5-adce0490e178\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.571979 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn2cs\" (UniqueName: \"kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs\") pod \"97b7b5ca-b073-49a3-9dc5-adce0490e178\" (UID: \"97b7b5ca-b073-49a3-9dc5-adce0490e178\") " Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.576004 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs" (OuterVolumeSpecName: "kube-api-access-dn2cs") pod "97b7b5ca-b073-49a3-9dc5-adce0490e178" (UID: "97b7b5ca-b073-49a3-9dc5-adce0490e178"). InnerVolumeSpecName "kube-api-access-dn2cs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.589020 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn2cs\" (UniqueName: \"kubernetes.io/projected/97b7b5ca-b073-49a3-9dc5-adce0490e178-kube-api-access-dn2cs\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.602782 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97b7b5ca-b073-49a3-9dc5-adce0490e178" (UID: "97b7b5ca-b073-49a3-9dc5-adce0490e178"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.634348 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data" (OuterVolumeSpecName: "config-data") pod "97b7b5ca-b073-49a3-9dc5-adce0490e178" (UID: "97b7b5ca-b073-49a3-9dc5-adce0490e178"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.690936 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.690984 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b7b5ca-b073-49a3-9dc5-adce0490e178-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.873182 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-5xddv" event={"ID":"97b7b5ca-b073-49a3-9dc5-adce0490e178","Type":"ContainerDied","Data":"e58d66a26ada0c14165fbb5a68ed9c72224fa7238000ddcd5aaa0e67eab63dfb"} Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.873253 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e58d66a26ada0c14165fbb5a68ed9c72224fa7238000ddcd5aaa0e67eab63dfb" Dec 11 09:42:28 crc kubenswrapper[4788]: I1211 09:42:28.873331 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-5xddv" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076285 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5tp2p"] Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076640 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" containerName="swift-ring-rebalance" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076657 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" containerName="swift-ring-rebalance" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076672 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed92d625-0e3b-43c8-9a8b-c228c701bcba" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076680 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed92d625-0e3b-43c8-9a8b-c228c701bcba" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076691 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="827fe8ce-f75e-4f71-b2d6-5d791513c568" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076697 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="827fe8ce-f75e-4f71-b2d6-5d791513c568" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076704 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d51ecca-226a-42a2-ad6c-249628ee18b7" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076710 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d51ecca-226a-42a2-ad6c-249628ee18b7" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076722 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d73232b-acdd-4586-a02d-e2f22ce3e0fa" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076727 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d73232b-acdd-4586-a02d-e2f22ce3e0fa" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076739 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c99537b-1ec9-4cf3-8fc8-12466dcca552" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076744 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c99537b-1ec9-4cf3-8fc8-12466dcca552" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076758 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97b7b5ca-b073-49a3-9dc5-adce0490e178" containerName="keystone-db-sync" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076764 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="97b7b5ca-b073-49a3-9dc5-adce0490e178" containerName="keystone-db-sync" Dec 11 09:42:29 crc kubenswrapper[4788]: E1211 09:42:29.076779 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c51946a-22cd-4576-8c5d-a37ec2746c2e" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076785 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c51946a-22cd-4576-8c5d-a37ec2746c2e" containerName="mariadb-database-create" Dec 11 
09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.076982 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed92d625-0e3b-43c8-9a8b-c228c701bcba" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077006 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="97b7b5ca-b073-49a3-9dc5-adce0490e178" containerName="keystone-db-sync" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077013 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="827fe8ce-f75e-4f71-b2d6-5d791513c568" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077023 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d51ecca-226a-42a2-ad6c-249628ee18b7" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077034 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f5299be-0ac1-4048-b2aa-6a07ce5c30e1" containerName="swift-ring-rebalance" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077043 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d73232b-acdd-4586-a02d-e2f22ce3e0fa" containerName="mariadb-account-create-update" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077052 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c99537b-1ec9-4cf3-8fc8-12466dcca552" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.077058 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c51946a-22cd-4576-8c5d-a37ec2746c2e" containerName="mariadb-database-create" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.078788 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.086333 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.086832 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.088727 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gthls" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.088954 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.094622 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097256 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097330 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097384 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2x98\" (UniqueName: \"kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097442 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097462 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.097487 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.102306 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.110123 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.127294 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200467 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200578 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200619 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200662 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200758 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8xsg\" (UniqueName: \"kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200784 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200815 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200883 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200916 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200939 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.200986 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2x98\" (UniqueName: \"kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.213592 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.213801 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.219730 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.220582 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.222077 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.227025 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5tp2p"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.238979 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2x98\" (UniqueName: \"kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98\") pod \"keystone-bootstrap-5tp2p\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.304012 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8xsg\" (UniqueName: 
\"kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.304108 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.304195 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.304265 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.304517 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.305676 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.306875 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bwvb7"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.308333 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.310584 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.311423 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.313104 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.322815 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-6xftg" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.323095 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.323272 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.346482 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.356458 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.360487 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-9zmxc" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.361331 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.368396 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8xsg\" (UniqueName: \"kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg\") pod \"dnsmasq-dns-5c9d85d47c-w5vtj\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.372290 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.390758 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.413247 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.415083 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.415133 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.415191 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.429384 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk6sz\" (UniqueName: \"kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.429654 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrmdm\" (UniqueName: \"kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.430116 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.430253 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.430509 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.460299 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532541 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532663 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532701 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk6sz\" (UniqueName: \"kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532777 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrmdm\" (UniqueName: \"kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532835 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532891 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.532937 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.533044 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.575009 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.604651 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.606639 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.614106 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.614547 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.641695 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.673722 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk6sz\" (UniqueName: \"kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz\") pod \"neutron-db-sync-bwvb7\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.673816 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bwvb7"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.713915 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrmdm\" (UniqueName: \"kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm\") pod \"horizon-7c768b8c89-58hs6\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.718330 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.758439 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.810223 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.832785 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-bkcpt"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.834174 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.837641 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-hdstb" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.848385 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.854566 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.878527 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bkcpt"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.916615 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.951157 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.953442 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.960458 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.960602 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ggfb\" (UniqueName: \"kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.960731 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.960862 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.960908 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.963346 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 
09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.971493 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:42:29 crc kubenswrapper[4788]: I1211 09:42:29.977443 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8k66n" event={"ID":"ed639405-5b9d-491f-b673-3d0657a14ecf","Type":"ContainerStarted","Data":"99d7855546af3787a9a2209a97ca65d78e41ec27d469523f594380049193aa2a"} Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.018313 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-tq2kw"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.021456 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.087643 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rkclk" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.087872 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.091086 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tq2kw"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.106885 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118004 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118091 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118129 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118160 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118182 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118245 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118285 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ggfb\" (UniqueName: \"kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118328 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118350 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118415 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118461 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118488 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.118512 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4sxx\" (UniqueName: \"kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.130407 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.130531 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 
09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.144441 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.145278 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.160416 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.181005 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ggfb\" (UniqueName: \"kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb\") pod \"cinder-db-sync-bkcpt\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.218926 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-scdh4"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.221968 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231301 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-j88gj" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231315 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231651 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231731 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231792 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231817 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.231842 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4sxx\" (UniqueName: \"kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.232985 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.234050 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.235963 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245562 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245697 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245753 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245792 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245952 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.245996 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbx6z\" (UniqueName: 
\"kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.260414 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.260516 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.262398 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.263171 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.264335 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.274781 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4sxx\" (UniqueName: \"kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx\") pod \"ceilometer-0\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.294546 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-scdh4"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.295111 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.304699 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.310015 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.314615 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.336071 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359213 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359276 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz966\" (UniqueName: \"kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359302 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbx6z\" (UniqueName: \"kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359321 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359357 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359374 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359409 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359442 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359506 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359527 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359542 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brn4t\" (UniqueName: \"kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359565 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.359592 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.363186 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.366026 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.376266 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.383081 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.385007 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.391333 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.392795 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbx6z\" (UniqueName: \"kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z\") pod \"placement-db-sync-tq2kw\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.400302 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.432774 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-8k66n" podStartSLOduration=3.200110849 podStartE2EDuration="46.432749392s" podCreationTimestamp="2025-12-11 09:41:44 +0000 UTC" firstStartedPulling="2025-12-11 09:41:45.082818338 +0000 UTC m=+1235.153597924" lastFinishedPulling="2025-12-11 09:42:28.315456881 +0000 UTC m=+1278.386236467" observedRunningTime="2025-12-11 09:42:30.138777434 +0000 UTC m=+1280.209557040" watchObservedRunningTime="2025-12-11 09:42:30.432749392 +0000 UTC m=+1280.503528978" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.458658 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463217 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463360 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5mpq\" (UniqueName: \"kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463455 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463530 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463609 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463660 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463685 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brn4t\" (UniqueName: \"kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463739 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463778 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463875 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz966\" (UniqueName: \"kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463934 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.463982 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.464112 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.465406 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs\") pod \"horizon-747958c97-dq5mr\" (UID: 
\"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.466913 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.471609 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.473343 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.476382 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.476810 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tq2kw" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.477418 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.501677 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz966\" (UniqueName: \"kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966\") pod \"horizon-747958c97-dq5mr\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.567211 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.567369 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5mpq\" (UniqueName: \"kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.567459 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb\") pod 
\"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.567524 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.567740 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.703940 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.706020 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.706349 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.723852 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brn4t\" (UniqueName: \"kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t\") pod \"barbican-db-sync-scdh4\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.750138 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5mpq\" (UniqueName: \"kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.750428 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-mtsfp\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.769680 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-scdh4" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.797623 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.823489 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:30 crc kubenswrapper[4788]: I1211 09:42:30.825343 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:31 crc kubenswrapper[4788]: I1211 09:42:31.009872 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" event={"ID":"76420939-68ca-412d-8db0-9a2dd7b0cba7","Type":"ContainerStarted","Data":"a4786273e17cd62d8ca2eee48bd72ac109d5b95ecf69562c20922e79cbc0b1d9"} Dec 11 09:42:31 crc kubenswrapper[4788]: I1211 09:42:31.080066 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:42:31 crc kubenswrapper[4788]: I1211 09:42:31.095897 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5tp2p"] Dec 11 09:42:31 crc kubenswrapper[4788]: I1211 09:42:31.681037 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bwvb7"] Dec 11 09:42:31 crc kubenswrapper[4788]: I1211 09:42:31.811961 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bkcpt"] Dec 11 09:42:32 crc kubenswrapper[4788]: W1211 09:42:32.051108 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod420d2814_b791_4928_8ebf_691365abfec1.slice/crio-61c4d1b873be3019be6f1f49d97ed66aaa12439bdbf6606fa672ecd3dba01915 WatchSource:0}: Error finding container 61c4d1b873be3019be6f1f49d97ed66aaa12439bdbf6606fa672ecd3dba01915: Status 404 returned error can't find the container with id 61c4d1b873be3019be6f1f49d97ed66aaa12439bdbf6606fa672ecd3dba01915 Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.054393 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.055671 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwvb7" event={"ID":"32f6b143-7465-4311-85d7-d7668fde477c","Type":"ContainerStarted","Data":"60c388bf45ebfc8ba0e5ce5859275377d54bc1843a03d658ea7b3f862ced7790"} Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.071942 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tq2kw"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.075359 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bkcpt" event={"ID":"539632da-1b63-429c-9d5c-3be34a9457ad","Type":"ContainerStarted","Data":"e7b2684dbbcd7e04da6b8cb4d4e7687d3d5bb449a35302c8f20ec8ee5006849c"} Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.080772 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-scdh4"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.082114 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5tp2p" event={"ID":"e582e0d2-1009-4db1-bbae-9a1d18041e74","Type":"ContainerStarted","Data":"f8c7d02729c9b90138f00fa77dd2c83ce2ff21861d7d326817a2a15a1b7b00ac"} Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.090066 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c768b8c89-58hs6" 
event={"ID":"53fad680-1537-4631-a479-83e53ce72983","Type":"ContainerStarted","Data":"8e3cbff85bc3bee3fda409b381abb24850f4ae956dd26bd5ed50b21144a69938"} Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.221023 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:42:32 crc kubenswrapper[4788]: W1211 09:42:32.228978 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e0171c8_cfda_4c64_9a70_5d1e596c8cd8.slice/crio-d3b4b33f663678159298253408f03acfbe869edc99e05aef1fd28c7e9d49ccfd WatchSource:0}: Error finding container d3b4b33f663678159298253408f03acfbe869edc99e05aef1fd28c7e9d49ccfd: Status 404 returned error can't find the container with id d3b4b33f663678159298253408f03acfbe869edc99e05aef1fd28c7e9d49ccfd Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.235646 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.749894 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.799903 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.846455 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.848027 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.888096 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.986755 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.987254 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.987359 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.987540 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:32 crc kubenswrapper[4788]: I1211 09:42:32.987656 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-tq2xd\" (UniqueName: \"kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090296 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq2xd\" (UniqueName: \"kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090384 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090423 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090444 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090513 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.090903 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.091634 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.092216 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.097169 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " 
pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.102691 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" event={"ID":"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8","Type":"ContainerStarted","Data":"d3b4b33f663678159298253408f03acfbe869edc99e05aef1fd28c7e9d49ccfd"} Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.103903 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-747958c97-dq5mr" event={"ID":"9dc702d3-65d8-4643-b3fe-9517e8bff3b8","Type":"ContainerStarted","Data":"a3196bc4540246b4e82d659cd515bb271ffba6094067b0a6bef5300ae17851f0"} Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.105836 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tq2kw" event={"ID":"d2f70f9c-2c54-4a03-adbc-af34bf91a63b","Type":"ContainerStarted","Data":"cde5eea2c2f4d4d08a7db4cd2520d9e9cd5595d71943c964e9710ac692a66250"} Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.106807 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerStarted","Data":"61c4d1b873be3019be6f1f49d97ed66aaa12439bdbf6606fa672ecd3dba01915"} Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.108176 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-scdh4" event={"ID":"169bf402-16b5-4aa7-838a-094a2e4c3330","Type":"ContainerStarted","Data":"599ac891ed3601e138d834c2490fcfc6f0dd26709b33b4b64cf531cdb7d0fd89"} Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.121172 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq2xd\" (UniqueName: \"kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd\") pod \"horizon-5b4cc889c-kk475\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.187824 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:42:33 crc kubenswrapper[4788]: I1211 09:42:33.775108 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:42:34 crc kubenswrapper[4788]: I1211 09:42:34.117804 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b4cc889c-kk475" event={"ID":"f5296a92-0fde-4d63-873c-724ba7e577a5","Type":"ContainerStarted","Data":"053b55fce64ffe926ea4df0a1e081b4d51571002509164bf39ff0d11bd14cf30"} Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.145075 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5tp2p" event={"ID":"e582e0d2-1009-4db1-bbae-9a1d18041e74","Type":"ContainerStarted","Data":"0ed4348b9228569d67703a8fd887ba7da79f772c3db0497c59e8a0b0ff2d7465"} Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.147108 4788 generic.go:334] "Generic (PLEG): container finished" podID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerID="45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c" exitCode=0 Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.147164 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" event={"ID":"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8","Type":"ContainerDied","Data":"45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c"} Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.150862 4788 generic.go:334] "Generic (PLEG): container finished" podID="76420939-68ca-412d-8db0-9a2dd7b0cba7" containerID="0ac1760fb6959d8c9be63201e2deb398e15ee61b6676afffe3fb5f563d0e9049" exitCode=0 Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.151654 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" event={"ID":"76420939-68ca-412d-8db0-9a2dd7b0cba7","Type":"ContainerDied","Data":"0ac1760fb6959d8c9be63201e2deb398e15ee61b6676afffe3fb5f563d0e9049"} Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.153031 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwvb7" event={"ID":"32f6b143-7465-4311-85d7-d7668fde477c","Type":"ContainerStarted","Data":"86b8455f9f4b87b9234f4d8fe45e0647e65a9a6431e22dc6688e5d81c3af5966"} Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.170614 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5tp2p" podStartSLOduration=7.1705757 podStartE2EDuration="7.1705757s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:42:36.164030093 +0000 UTC m=+1286.234809699" watchObservedRunningTime="2025-12-11 09:42:36.1705757 +0000 UTC m=+1286.241355286" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.196819 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bwvb7" podStartSLOduration=7.19678967 podStartE2EDuration="7.19678967s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:42:36.18268971 +0000 UTC m=+1286.253469316" watchObservedRunningTime="2025-12-11 09:42:36.19678967 +0000 UTC m=+1286.267569256" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.695029 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.772511 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc\") pod \"76420939-68ca-412d-8db0-9a2dd7b0cba7\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.772621 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb\") pod \"76420939-68ca-412d-8db0-9a2dd7b0cba7\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.772755 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8xsg\" (UniqueName: \"kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg\") pod \"76420939-68ca-412d-8db0-9a2dd7b0cba7\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.773973 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config\") pod \"76420939-68ca-412d-8db0-9a2dd7b0cba7\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.774174 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb\") pod \"76420939-68ca-412d-8db0-9a2dd7b0cba7\" (UID: \"76420939-68ca-412d-8db0-9a2dd7b0cba7\") " Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.780748 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg" (OuterVolumeSpecName: "kube-api-access-h8xsg") pod "76420939-68ca-412d-8db0-9a2dd7b0cba7" (UID: "76420939-68ca-412d-8db0-9a2dd7b0cba7"). InnerVolumeSpecName "kube-api-access-h8xsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.799110 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76420939-68ca-412d-8db0-9a2dd7b0cba7" (UID: "76420939-68ca-412d-8db0-9a2dd7b0cba7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.802003 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config" (OuterVolumeSpecName: "config") pod "76420939-68ca-412d-8db0-9a2dd7b0cba7" (UID: "76420939-68ca-412d-8db0-9a2dd7b0cba7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.811345 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76420939-68ca-412d-8db0-9a2dd7b0cba7" (UID: "76420939-68ca-412d-8db0-9a2dd7b0cba7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.811356 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76420939-68ca-412d-8db0-9a2dd7b0cba7" (UID: "76420939-68ca-412d-8db0-9a2dd7b0cba7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.877394 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.877773 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.877786 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.877799 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8xsg\" (UniqueName: \"kubernetes.io/projected/76420939-68ca-412d-8db0-9a2dd7b0cba7-kube-api-access-h8xsg\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:36 crc kubenswrapper[4788]: I1211 09:42:36.877813 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76420939-68ca-412d-8db0-9a2dd7b0cba7-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.209155 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" event={"ID":"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8","Type":"ContainerStarted","Data":"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4"} Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.209353 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.215304 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" event={"ID":"76420939-68ca-412d-8db0-9a2dd7b0cba7","Type":"ContainerDied","Data":"a4786273e17cd62d8ca2eee48bd72ac109d5b95ecf69562c20922e79cbc0b1d9"} Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.215357 4788 scope.go:117] "RemoveContainer" containerID="0ac1760fb6959d8c9be63201e2deb398e15ee61b6676afffe3fb5f563d0e9049" Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.215558 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-w5vtj" Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.310151 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" podStartSLOduration=8.310131755 podStartE2EDuration="8.310131755s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:42:37.244523467 +0000 UTC m=+1287.315303063" watchObservedRunningTime="2025-12-11 09:42:37.310131755 +0000 UTC m=+1287.380911331" Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.333938 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:37 crc kubenswrapper[4788]: I1211 09:42:37.345088 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-w5vtj"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.512041 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76420939-68ca-412d-8db0-9a2dd7b0cba7" path="/var/lib/kubelet/pods/76420939-68ca-412d-8db0-9a2dd7b0cba7/volumes" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.538287 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.576196 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:42:38 crc kubenswrapper[4788]: E1211 09:42:38.576766 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76420939-68ca-412d-8db0-9a2dd7b0cba7" containerName="init" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.576785 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="76420939-68ca-412d-8db0-9a2dd7b0cba7" containerName="init" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.576966 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="76420939-68ca-412d-8db0-9a2dd7b0cba7" containerName="init" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.578392 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.584975 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.598049 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.657847 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.667194 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84d5c869dd-hzg6f"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.669293 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.689655 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84d5c869dd-hzg6f"] Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.757726 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6jsx\" (UniqueName: \"kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.757877 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.757925 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.758035 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.758080 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.758159 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.758251 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860059 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-logs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860124 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-combined-ca-bundle\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860157 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860368 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860466 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6jsx\" (UniqueName: \"kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860519 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-scripts\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860575 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-config-data\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860642 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-tls-certs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860799 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860867 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5vzf\" (UniqueName: \"kubernetes.io/projected/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-kube-api-access-r5vzf\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.860942 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861040 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861067 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-secret-key\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861110 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861223 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861533 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.861988 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.866744 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.869201 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.872576 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " 
pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.962740 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-secret-key\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.962863 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-logs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.962896 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-combined-ca-bundle\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.962977 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-scripts\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.963008 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-config-data\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.963031 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-tls-certs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.963097 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5vzf\" (UniqueName: \"kubernetes.io/projected/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-kube-api-access-r5vzf\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.992473 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-scripts\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.992909 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-logs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.993073 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-secret-key\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.993846 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-config-data\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.995614 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-combined-ca-bundle\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:38 crc kubenswrapper[4788]: I1211 09:42:38.997911 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-horizon-tls-certs\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:39 crc kubenswrapper[4788]: I1211 09:42:39.021576 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5vzf\" (UniqueName: \"kubernetes.io/projected/3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251-kube-api-access-r5vzf\") pod \"horizon-84d5c869dd-hzg6f\" (UID: \"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251\") " pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:39 crc kubenswrapper[4788]: I1211 09:42:39.292075 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:42:39 crc kubenswrapper[4788]: I1211 09:42:39.751643 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84d5c869dd-hzg6f"] Dec 11 09:42:40 crc kubenswrapper[4788]: I1211 09:42:40.125313 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6jsx\" (UniqueName: \"kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx\") pod \"horizon-7db769bcbd-mnjsv\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:40 crc kubenswrapper[4788]: I1211 09:42:40.257496 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84d5c869dd-hzg6f" event={"ID":"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251","Type":"ContainerStarted","Data":"0cc9b0741b302e2fb84d18a22c4a2aadfc5d43853e290db895555c7e9a65d3a6"} Dec 11 09:42:40 crc kubenswrapper[4788]: I1211 09:42:40.404102 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:42:40 crc kubenswrapper[4788]: I1211 09:42:40.946034 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:42:41 crc kubenswrapper[4788]: I1211 09:42:41.271185 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerStarted","Data":"c0b5f233936f440d88cab19ad462ad85eee2d1c786738679f0c023936915636d"} Dec 11 09:42:45 crc kubenswrapper[4788]: I1211 09:42:45.828387 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:42:45 crc kubenswrapper[4788]: I1211 09:42:45.885066 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:42:45 crc kubenswrapper[4788]: I1211 09:42:45.885378 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" containerID="cri-o://83b2aaed2447d8eaca6eb6350a8212f4fef4162bef8f78c63e118f1be6b93791" gracePeriod=10 Dec 11 09:42:50 crc kubenswrapper[4788]: I1211 09:42:50.157459 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Dec 11 09:42:52 crc kubenswrapper[4788]: I1211 09:42:52.367102 4788 generic.go:334] "Generic (PLEG): container finished" podID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerID="83b2aaed2447d8eaca6eb6350a8212f4fef4162bef8f78c63e118f1be6b93791" exitCode=0 Dec 11 09:42:52 crc kubenswrapper[4788]: I1211 09:42:52.367352 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" event={"ID":"c415f833-a0e0-428b-9ece-6c5f617dfc18","Type":"ContainerDied","Data":"83b2aaed2447d8eaca6eb6350a8212f4fef4162bef8f78c63e118f1be6b93791"} Dec 11 09:43:03 crc kubenswrapper[4788]: E1211 09:42:58.542575 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 11 09:43:03 crc kubenswrapper[4788]: E1211 09:42:58.543339 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf4h675h5b4hd8hf5hc6h5cdh59ch549h565h65bh578h644hf4h554h5f7h644h68fh8ch546h665h546h567h5bh67fh89h644h564h694h678hb5h558q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4sxx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(420d2814-b791-4928-8ebf-691365abfec1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:03 crc kubenswrapper[4788]: I1211 09:43:00.157479 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:05 crc kubenswrapper[4788]: I1211 09:43:05.158701 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:05 crc kubenswrapper[4788]: I1211 09:43:05.159470 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:43:07 crc kubenswrapper[4788]: E1211 09:43:07.781443 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 11 09:43:07 crc kubenswrapper[4788]: E1211 09:43:07.781890 4788 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8ggfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-bkcpt_openstack(539632da-1b63-429c-9d5c-3be34a9457ad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:07 crc kubenswrapper[4788]: E1211 09:43:07.783033 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-bkcpt" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" Dec 11 09:43:08 crc kubenswrapper[4788]: E1211 09:43:08.526746 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-bkcpt" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" Dec 11 09:43:10 crc kubenswrapper[4788]: I1211 09:43:10.160013 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" 
containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:14 crc kubenswrapper[4788]: E1211 09:43:14.035726 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[etc-swift], unattached volumes=[], failed to process volumes=[]: context deadline exceeded" pod="openstack/swift-storage-0" podUID="874f552a-7856-439c-937c-a87d9c15305c" Dec 11 09:43:14 crc kubenswrapper[4788]: I1211 09:43:14.573165 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 11 09:43:15 crc kubenswrapper[4788]: I1211 09:43:15.160892 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:16 crc kubenswrapper[4788]: E1211 09:43:16.211290 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 11 09:43:16 crc kubenswrapper[4788]: E1211 09:43:16.211470 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbx6z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-tq2kw_openstack(d2f70f9c-2c54-4a03-adbc-af34bf91a63b): ErrImagePull: rpc error: code = Canceled desc = copying config: context 
canceled" logger="UnhandledError" Dec 11 09:43:16 crc kubenswrapper[4788]: E1211 09:43:16.213036 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-tq2kw" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" Dec 11 09:43:16 crc kubenswrapper[4788]: E1211 09:43:16.588864 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-tq2kw" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" Dec 11 09:43:19 crc kubenswrapper[4788]: I1211 09:43:19.025041 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:43:19 crc kubenswrapper[4788]: I1211 09:43:19.146090 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/874f552a-7856-439c-937c-a87d9c15305c-etc-swift\") pod \"swift-storage-0\" (UID: \"874f552a-7856-439c-937c-a87d9c15305c\") " pod="openstack/swift-storage-0" Dec 11 09:43:19 crc kubenswrapper[4788]: I1211 09:43:19.375006 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 11 09:43:20 crc kubenswrapper[4788]: I1211 09:43:20.162343 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:21 crc kubenswrapper[4788]: I1211 09:43:21.369575 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:43:21 crc kubenswrapper[4788]: I1211 09:43:21.369646 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:43:25 crc kubenswrapper[4788]: I1211 09:43:25.163275 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:28 crc kubenswrapper[4788]: E1211 09:43:28.931385 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 11 09:43:28 crc kubenswrapper[4788]: E1211 09:43:28.931926 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c 
tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n575h645h57fh5fch665h5d4h69h585hbh94h8ch5fdh56h5b7h55bh598hb8h5c4h9h6bhbch697h596h54h86hddh54bh69h547h7h85h9bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rz966,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-747958c97-dq5mr_openstack(9dc702d3-65d8-4643-b3fe-9517e8bff3b8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:28 crc kubenswrapper[4788]: E1211 09:43:28.942700 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-747958c97-dq5mr" podUID="9dc702d3-65d8-4643-b3fe-9517e8bff3b8" Dec 11 09:43:29 crc kubenswrapper[4788]: I1211 09:43:29.865741 4788 generic.go:334] "Generic (PLEG): container finished" podID="e582e0d2-1009-4db1-bbae-9a1d18041e74" containerID="0ed4348b9228569d67703a8fd887ba7da79f772c3db0497c59e8a0b0ff2d7465" exitCode=0 Dec 11 09:43:29 crc kubenswrapper[4788]: I1211 09:43:29.866170 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5tp2p" event={"ID":"e582e0d2-1009-4db1-bbae-9a1d18041e74","Type":"ContainerDied","Data":"0ed4348b9228569d67703a8fd887ba7da79f772c3db0497c59e8a0b0ff2d7465"} Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.164237 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.636734 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.637027 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.637498 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cbh687h87h8ch75h595h5d4h74h66fh548hcbh697h599h644h545hd4hc8hfch65dh68h54h574h66dh557h674h5cdhd4h54ch4hf9h69h584q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r5vzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-84d5c869dd-hzg6f_openstack(3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.637527 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n549h8bhb9h68ch669h5b9h666h65h569h595hd6h5fh5dch685h556h58h569h548h699h5bh684h64bh556hcfh9fh575h694h565h585hf6hcbh6bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zrmdm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7c768b8c89-58hs6_openstack(53fad680-1537-4631-a479-83e53ce72983): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.639903 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7c768b8c89-58hs6" podUID="53fad680-1537-4631-a479-83e53ce72983" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.640021 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-84d5c869dd-hzg6f" podUID="3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.651345 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.651523 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n669h574hb7h55bh569hfh9dh65fh588h64dh59h85h8fh5f6hbfh558h685h697h578hc9h566h647h567h674h5dbh56ch649h566h5b4h5d6hf4h67cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tq2xd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5b4cc889c-kk475_openstack(f5296a92-0fde-4d63-873c-724ba7e577a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.655536 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5b4cc889c-kk475" podUID="f5296a92-0fde-4d63-873c-724ba7e577a5" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.716919 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.722607 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.875622 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" event={"ID":"c415f833-a0e0-428b-9ece-6c5f617dfc18","Type":"ContainerDied","Data":"296497b24e1c14a78529438789cece2933f3651afbc804c4288dd6aa19f8bcef"} Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.875681 4788 scope.go:117] "RemoveContainer" containerID="83b2aaed2447d8eaca6eb6350a8212f4fef4162bef8f78c63e118f1be6b93791" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.875682 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.878280 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-747958c97-dq5mr" event={"ID":"9dc702d3-65d8-4643-b3fe-9517e8bff3b8","Type":"ContainerDied","Data":"a3196bc4540246b4e82d659cd515bb271ffba6094067b0a6bef5300ae17851f0"} Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.878359 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-747958c97-dq5mr" Dec 11 09:43:30 crc kubenswrapper[4788]: E1211 09:43:30.882291 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-84d5c869dd-hzg6f" podUID="3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909206 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs\") pod \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909386 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key\") pod \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909571 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb\") pod \"c415f833-a0e0-428b-9ece-6c5f617dfc18\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909629 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb\") pod \"c415f833-a0e0-428b-9ece-6c5f617dfc18\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909695 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwsbl\" (UniqueName: \"kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl\") pod \"c415f833-a0e0-428b-9ece-6c5f617dfc18\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909752 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config\") pod \"c415f833-a0e0-428b-9ece-6c5f617dfc18\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909824 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data\") pod \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\" (UID: 
\"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909859 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz966\" (UniqueName: \"kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966\") pod \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909909 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc\") pod \"c415f833-a0e0-428b-9ece-6c5f617dfc18\" (UID: \"c415f833-a0e0-428b-9ece-6c5f617dfc18\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.909930 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts\") pod \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\" (UID: \"9dc702d3-65d8-4643-b3fe-9517e8bff3b8\") " Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.911511 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts" (OuterVolumeSpecName: "scripts") pod "9dc702d3-65d8-4643-b3fe-9517e8bff3b8" (UID: "9dc702d3-65d8-4643-b3fe-9517e8bff3b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.912391 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs" (OuterVolumeSpecName: "logs") pod "9dc702d3-65d8-4643-b3fe-9517e8bff3b8" (UID: "9dc702d3-65d8-4643-b3fe-9517e8bff3b8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.925412 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data" (OuterVolumeSpecName: "config-data") pod "9dc702d3-65d8-4643-b3fe-9517e8bff3b8" (UID: "9dc702d3-65d8-4643-b3fe-9517e8bff3b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.927000 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl" (OuterVolumeSpecName: "kube-api-access-kwsbl") pod "c415f833-a0e0-428b-9ece-6c5f617dfc18" (UID: "c415f833-a0e0-428b-9ece-6c5f617dfc18"). InnerVolumeSpecName "kube-api-access-kwsbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.927220 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966" (OuterVolumeSpecName: "kube-api-access-rz966") pod "9dc702d3-65d8-4643-b3fe-9517e8bff3b8" (UID: "9dc702d3-65d8-4643-b3fe-9517e8bff3b8"). InnerVolumeSpecName "kube-api-access-rz966". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.933935 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9dc702d3-65d8-4643-b3fe-9517e8bff3b8" (UID: "9dc702d3-65d8-4643-b3fe-9517e8bff3b8"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.972476 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config" (OuterVolumeSpecName: "config") pod "c415f833-a0e0-428b-9ece-6c5f617dfc18" (UID: "c415f833-a0e0-428b-9ece-6c5f617dfc18"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.978735 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c415f833-a0e0-428b-9ece-6c5f617dfc18" (UID: "c415f833-a0e0-428b-9ece-6c5f617dfc18"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.983878 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c415f833-a0e0-428b-9ece-6c5f617dfc18" (UID: "c415f833-a0e0-428b-9ece-6c5f617dfc18"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:30 crc kubenswrapper[4788]: I1211 09:43:30.987528 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c415f833-a0e0-428b-9ece-6c5f617dfc18" (UID: "c415f833-a0e0-428b-9ece-6c5f617dfc18"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012667 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012694 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012705 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwsbl\" (UniqueName: \"kubernetes.io/projected/c415f833-a0e0-428b-9ece-6c5f617dfc18-kube-api-access-kwsbl\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012715 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012725 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012736 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz966\" (UniqueName: \"kubernetes.io/projected/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-kube-api-access-rz966\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012745 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c415f833-a0e0-428b-9ece-6c5f617dfc18-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012754 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012762 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.012770 4788 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9dc702d3-65d8-4643-b3fe-9517e8bff3b8-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.280590 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.297843 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-747958c97-dq5mr"] Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.306933 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.316432 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-hk97f"] Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.423843 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.424200 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-brn4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-scdh4_openstack(169bf402-16b5-4aa7-838a-094a2e4c3330): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.425410 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-scdh4" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.452560 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.474502 4788 scope.go:117] "RemoveContainer" containerID="bc1c19a6197e7b634b7b16fc3dc44ecbbc99c3bde1af082cb275a256c39e134c" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.474619 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.480026 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.480154 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nbfh686h5b9h57ch587h66h697h585h5ch7fh9h588h59dh5d9h7dh78h596h5fch656h86h544h5b8h6bh6fh689h565hd7h645hfbh5c9h585h56dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l6jsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7db769bcbd-mnjsv_openstack(a9231129-6aaf-4d8e-83fe-cc79ba9d135b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.482531 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.520606 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623392 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key\") pod \"f5296a92-0fde-4d63-873c-724ba7e577a5\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623434 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts\") pod \"f5296a92-0fde-4d63-873c-724ba7e577a5\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623456 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs\") pod \"53fad680-1537-4631-a479-83e53ce72983\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623473 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623515 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq2xd\" (UniqueName: \"kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd\") pod \"f5296a92-0fde-4d63-873c-724ba7e577a5\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623532 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623611 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data\") pod \"53fad680-1537-4631-a479-83e53ce72983\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623736 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2x98\" (UniqueName: \"kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623762 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623791 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623812 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrmdm\" (UniqueName: \"kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm\") pod \"53fad680-1537-4631-a479-83e53ce72983\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623838 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key\") pod \"53fad680-1537-4631-a479-83e53ce72983\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623864 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data\") pod \"f5296a92-0fde-4d63-873c-724ba7e577a5\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623909 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts\") pod \"53fad680-1537-4631-a479-83e53ce72983\" (UID: \"53fad680-1537-4631-a479-83e53ce72983\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623932 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys\") pod \"e582e0d2-1009-4db1-bbae-9a1d18041e74\" (UID: \"e582e0d2-1009-4db1-bbae-9a1d18041e74\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.623957 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs\") pod \"f5296a92-0fde-4d63-873c-724ba7e577a5\" (UID: \"f5296a92-0fde-4d63-873c-724ba7e577a5\") " Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.624790 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs" (OuterVolumeSpecName: "logs") pod "53fad680-1537-4631-a479-83e53ce72983" (UID: "53fad680-1537-4631-a479-83e53ce72983"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.624887 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts" (OuterVolumeSpecName: "scripts") pod "f5296a92-0fde-4d63-873c-724ba7e577a5" (UID: "f5296a92-0fde-4d63-873c-724ba7e577a5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.624926 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data" (OuterVolumeSpecName: "config-data") pod "f5296a92-0fde-4d63-873c-724ba7e577a5" (UID: "f5296a92-0fde-4d63-873c-724ba7e577a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.625441 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs" (OuterVolumeSpecName: "logs") pod "f5296a92-0fde-4d63-873c-724ba7e577a5" (UID: "f5296a92-0fde-4d63-873c-724ba7e577a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.629250 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts" (OuterVolumeSpecName: "scripts") pod "53fad680-1537-4631-a479-83e53ce72983" (UID: "53fad680-1537-4631-a479-83e53ce72983"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.629291 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f5296a92-0fde-4d63-873c-724ba7e577a5" (UID: "f5296a92-0fde-4d63-873c-724ba7e577a5"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.629312 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.629932 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data" (OuterVolumeSpecName: "config-data") pod "53fad680-1537-4631-a479-83e53ce72983" (UID: "53fad680-1537-4631-a479-83e53ce72983"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.630133 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts" (OuterVolumeSpecName: "scripts") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.630698 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "53fad680-1537-4631-a479-83e53ce72983" (UID: "53fad680-1537-4631-a479-83e53ce72983"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.630955 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98" (OuterVolumeSpecName: "kube-api-access-x2x98") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "kube-api-access-x2x98". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.632420 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd" (OuterVolumeSpecName: "kube-api-access-tq2xd") pod "f5296a92-0fde-4d63-873c-724ba7e577a5" (UID: "f5296a92-0fde-4d63-873c-724ba7e577a5"). InnerVolumeSpecName "kube-api-access-tq2xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.634829 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm" (OuterVolumeSpecName: "kube-api-access-zrmdm") pod "53fad680-1537-4631-a479-83e53ce72983" (UID: "53fad680-1537-4631-a479-83e53ce72983"). InnerVolumeSpecName "kube-api-access-zrmdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.635745 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.689135 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data" (OuterVolumeSpecName: "config-data") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.696119 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e582e0d2-1009-4db1-bbae-9a1d18041e74" (UID: "e582e0d2-1009-4db1-bbae-9a1d18041e74"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726253 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726286 4788 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726300 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5296a92-0fde-4d63-873c-724ba7e577a5-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726309 4788 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5296a92-0fde-4d63-873c-724ba7e577a5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726320 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726332 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53fad680-1537-4631-a479-83e53ce72983-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726341 4788 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726350 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726358 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq2xd\" (UniqueName: \"kubernetes.io/projected/f5296a92-0fde-4d63-873c-724ba7e577a5-kube-api-access-tq2xd\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726369 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/53fad680-1537-4631-a479-83e53ce72983-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726377 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2x98\" (UniqueName: \"kubernetes.io/projected/e582e0d2-1009-4db1-bbae-9a1d18041e74-kube-api-access-x2x98\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726385 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726393 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e582e0d2-1009-4db1-bbae-9a1d18041e74-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726400 4788 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrmdm\" (UniqueName: \"kubernetes.io/projected/53fad680-1537-4631-a479-83e53ce72983-kube-api-access-zrmdm\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726408 4788 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/53fad680-1537-4631-a479-83e53ce72983-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.726416 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5296a92-0fde-4d63-873c-724ba7e577a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.880749 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.880918 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-notification-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf4h675h5b4hd8hf5hc6h5cdh59ch549h565h65bh578h644hf4h554h5f7h644h68fh8ch546h665h546h567h5bh67fh89h644h564h694h678hb5h558q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-notification-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h4sxx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/notificationhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ceilometer-0_openstack(420d2814-b791-4928-8ebf-691365abfec1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.892826 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5tp2p" event={"ID":"e582e0d2-1009-4db1-bbae-9a1d18041e74","Type":"ContainerDied","Data":"f8c7d02729c9b90138f00fa77dd2c83ce2ff21861d7d326817a2a15a1b7b00ac"} Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.892874 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c7d02729c9b90138f00fa77dd2c83ce2ff21861d7d326817a2a15a1b7b00ac" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.892935 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5tp2p" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.898443 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c768b8c89-58hs6" event={"ID":"53fad680-1537-4631-a479-83e53ce72983","Type":"ContainerDied","Data":"8e3cbff85bc3bee3fda409b381abb24850f4ae956dd26bd5ed50b21144a69938"} Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.898475 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c768b8c89-58hs6" Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.899487 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b4cc889c-kk475" event={"ID":"f5296a92-0fde-4d63-873c-724ba7e577a5","Type":"ContainerDied","Data":"053b55fce64ffe926ea4df0a1e081b4d51571002509164bf39ff0d11bd14cf30"} Dec 11 09:43:31 crc kubenswrapper[4788]: I1211 09:43:31.899505 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5b4cc889c-kk475" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.916107 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-scdh4" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" Dec 11 09:43:31 crc kubenswrapper[4788]: E1211 09:43:31.938766 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.012308 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5tp2p"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.019652 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5tp2p"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.152290 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.166683 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7c768b8c89-58hs6"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.180895 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-qfqsj"] Dec 11 09:43:32 crc kubenswrapper[4788]: E1211 09:43:32.181552 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.181574 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" Dec 11 09:43:32 crc kubenswrapper[4788]: E1211 09:43:32.181650 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e582e0d2-1009-4db1-bbae-9a1d18041e74" containerName="keystone-bootstrap" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.181661 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="e582e0d2-1009-4db1-bbae-9a1d18041e74" containerName="keystone-bootstrap" Dec 11 09:43:32 crc kubenswrapper[4788]: E1211 09:43:32.181691 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="init" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.181704 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="init" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.181951 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.181975 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="e582e0d2-1009-4db1-bbae-9a1d18041e74" containerName="keystone-bootstrap" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.182981 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.186835 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.187050 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.187167 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gthls" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.187335 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.190791 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.196360 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qfqsj"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.222100 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.231841 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5b4cc889c-kk475"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.350853 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.350914 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.350995 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.351020 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxsw8\" (UniqueName: \"kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.351058 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.351151 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.452938 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.453010 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.453054 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.453085 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxsw8\" (UniqueName: \"kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.453127 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.453190 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.459502 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.459584 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.461159 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " 
pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.465987 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.468705 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.473549 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxsw8\" (UniqueName: \"kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8\") pod \"keystone-bootstrap-qfqsj\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.523623 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.543546 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53fad680-1537-4631-a479-83e53ce72983" path="/var/lib/kubelet/pods/53fad680-1537-4631-a479-83e53ce72983/volumes" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.543990 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc702d3-65d8-4643-b3fe-9517e8bff3b8" path="/var/lib/kubelet/pods/9dc702d3-65d8-4643-b3fe-9517e8bff3b8/volumes" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.544703 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" path="/var/lib/kubelet/pods/c415f833-a0e0-428b-9ece-6c5f617dfc18/volumes" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.545438 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e582e0d2-1009-4db1-bbae-9a1d18041e74" path="/var/lib/kubelet/pods/e582e0d2-1009-4db1-bbae-9a1d18041e74/volumes" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.546755 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5296a92-0fde-4d63-873c-724ba7e577a5" path="/var/lib/kubelet/pods/f5296a92-0fde-4d63-873c-724ba7e577a5/volumes" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.590755 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.912586 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"49e30d43619d8973cced4387316621bfd7eb8fb447b0607c1993658cc42428ba"} Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.914376 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bkcpt" event={"ID":"539632da-1b63-429c-9d5c-3be34a9457ad","Type":"ContainerStarted","Data":"ff2151bc49fd0cafd73c9fb2ceb0468e3d2614a208ebd9fe5817ecee62fe6e97"} Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.916635 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tq2kw" 
event={"ID":"d2f70f9c-2c54-4a03-adbc-af34bf91a63b","Type":"ContainerStarted","Data":"0aaa8107acc882b556c26a9d7b90f94920650804284bf1b4bb9f0c8b89899851"} Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.935156 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-bkcpt" podStartSLOduration=3.796157747 podStartE2EDuration="1m3.935137168s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="2025-12-11 09:42:31.821784737 +0000 UTC m=+1281.892564323" lastFinishedPulling="2025-12-11 09:43:31.960764158 +0000 UTC m=+1342.031543744" observedRunningTime="2025-12-11 09:43:32.931100045 +0000 UTC m=+1343.001879651" watchObservedRunningTime="2025-12-11 09:43:32.935137168 +0000 UTC m=+1343.005916754" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.955965 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-tq2kw" podStartSLOduration=4.076151408 podStartE2EDuration="1m3.95594338s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="2025-12-11 09:42:32.079152999 +0000 UTC m=+1282.149932585" lastFinishedPulling="2025-12-11 09:43:31.958944981 +0000 UTC m=+1342.029724557" observedRunningTime="2025-12-11 09:43:32.950681635 +0000 UTC m=+1343.021461221" watchObservedRunningTime="2025-12-11 09:43:32.95594338 +0000 UTC m=+1343.026722966" Dec 11 09:43:32 crc kubenswrapper[4788]: I1211 09:43:32.986172 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qfqsj"] Dec 11 09:43:33 crc kubenswrapper[4788]: I1211 09:43:33.929367 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qfqsj" event={"ID":"ed0ae66f-f028-4fd2-9617-37f1148ec651","Type":"ContainerStarted","Data":"ec200f33775fdcd3eabce8f962f5c0e6efc74869f3e1d59025dd4044cc0ed420"} Dec 11 09:43:33 crc kubenswrapper[4788]: I1211 09:43:33.929743 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qfqsj" event={"ID":"ed0ae66f-f028-4fd2-9617-37f1148ec651","Type":"ContainerStarted","Data":"852da373e92c1d2a008fe7c1730b0e49cdbcd0c25ea2420ca383e5e336c6eeca"} Dec 11 09:43:33 crc kubenswrapper[4788]: I1211 09:43:33.964473 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-qfqsj" podStartSLOduration=1.964450244 podStartE2EDuration="1.964450244s" podCreationTimestamp="2025-12-11 09:43:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:43:33.954097479 +0000 UTC m=+1344.024877065" watchObservedRunningTime="2025-12-11 09:43:33.964450244 +0000 UTC m=+1344.035229830" Dec 11 09:43:35 crc kubenswrapper[4788]: I1211 09:43:35.165431 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-hk97f" podUID="c415f833-a0e0-428b-9ece-6c5f617dfc18" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.970433 4788 generic.go:334] "Generic (PLEG): container finished" podID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" containerID="0aaa8107acc882b556c26a9d7b90f94920650804284bf1b4bb9f0c8b89899851" exitCode=0 Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.970597 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tq2kw" 
event={"ID":"d2f70f9c-2c54-4a03-adbc-af34bf91a63b","Type":"ContainerDied","Data":"0aaa8107acc882b556c26a9d7b90f94920650804284bf1b4bb9f0c8b89899851"} Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.974726 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerStarted","Data":"852e99eecda7b3a4ac5b2061f808910328354db8ad86533ede4fff03cb48ae2a"} Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.977250 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"7fe936df85aac71838ed1cb1b9aea6829a60588d0d485f05145a2d74f1232fdc"} Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.977377 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"d46726bf661eeca6cff93599f005dda80019e6f835a6129c2724f8339ab6dab5"} Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.978440 4788 generic.go:334] "Generic (PLEG): container finished" podID="ed0ae66f-f028-4fd2-9617-37f1148ec651" containerID="ec200f33775fdcd3eabce8f962f5c0e6efc74869f3e1d59025dd4044cc0ed420" exitCode=0 Dec 11 09:43:37 crc kubenswrapper[4788]: I1211 09:43:37.978550 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qfqsj" event={"ID":"ed0ae66f-f028-4fd2-9617-37f1148ec651","Type":"ContainerDied","Data":"ec200f33775fdcd3eabce8f962f5c0e6efc74869f3e1d59025dd4044cc0ed420"} Dec 11 09:43:38 crc kubenswrapper[4788]: I1211 09:43:38.995115 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"47b8595afcb35abc5f44a420df923d1b967d0eb62316c5c6d3c64703c2417260"} Dec 11 09:43:38 crc kubenswrapper[4788]: I1211 09:43:38.995443 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"deb4ad85dfc16a4cd61ee8846e877007f71a082bcd96fceade57c66b36160d9c"} Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.368444 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.375324 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tq2kw" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417486 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417574 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417613 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts\") pod \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417698 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs\") pod \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417774 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxsw8\" (UniqueName: \"kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417827 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417864 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data\") pod \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417894 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle\") pod \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.417919 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbx6z\" (UniqueName: \"kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z\") pod \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\" (UID: \"d2f70f9c-2c54-4a03-adbc-af34bf91a63b\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.418117 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: 
\"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.418142 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts\") pod \"ed0ae66f-f028-4fd2-9617-37f1148ec651\" (UID: \"ed0ae66f-f028-4fd2-9617-37f1148ec651\") " Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.426526 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts" (OuterVolumeSpecName: "scripts") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.435221 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts" (OuterVolumeSpecName: "scripts") pod "d2f70f9c-2c54-4a03-adbc-af34bf91a63b" (UID: "d2f70f9c-2c54-4a03-adbc-af34bf91a63b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.435746 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.436262 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs" (OuterVolumeSpecName: "logs") pod "d2f70f9c-2c54-4a03-adbc-af34bf91a63b" (UID: "d2f70f9c-2c54-4a03-adbc-af34bf91a63b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.438718 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.448524 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8" (OuterVolumeSpecName: "kube-api-access-lxsw8") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "kube-api-access-lxsw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.449034 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z" (OuterVolumeSpecName: "kube-api-access-qbx6z") pod "d2f70f9c-2c54-4a03-adbc-af34bf91a63b" (UID: "d2f70f9c-2c54-4a03-adbc-af34bf91a63b"). InnerVolumeSpecName "kube-api-access-qbx6z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.475680 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.478477 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2f70f9c-2c54-4a03-adbc-af34bf91a63b" (UID: "d2f70f9c-2c54-4a03-adbc-af34bf91a63b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.479083 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data" (OuterVolumeSpecName: "config-data") pod "ed0ae66f-f028-4fd2-9617-37f1148ec651" (UID: "ed0ae66f-f028-4fd2-9617-37f1148ec651"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.484698 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data" (OuterVolumeSpecName: "config-data") pod "d2f70f9c-2c54-4a03-adbc-af34bf91a63b" (UID: "d2f70f9c-2c54-4a03-adbc-af34bf91a63b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.521173 4788 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522544 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522646 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522752 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbx6z\" (UniqueName: \"kubernetes.io/projected/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-kube-api-access-qbx6z\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522828 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522914 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.522988 4788 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" 
(UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.523061 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0ae66f-f028-4fd2-9617-37f1148ec651-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.523147 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.523221 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f70f9c-2c54-4a03-adbc-af34bf91a63b-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:39 crc kubenswrapper[4788]: I1211 09:43:39.523322 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxsw8\" (UniqueName: \"kubernetes.io/projected/ed0ae66f-f028-4fd2-9617-37f1148ec651-kube-api-access-lxsw8\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.017302 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qfqsj" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.019405 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qfqsj" event={"ID":"ed0ae66f-f028-4fd2-9617-37f1148ec651","Type":"ContainerDied","Data":"852da373e92c1d2a008fe7c1730b0e49cdbcd0c25ea2420ca383e5e336c6eeca"} Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.019906 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="852da373e92c1d2a008fe7c1730b0e49cdbcd0c25ea2420ca383e5e336c6eeca" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.029865 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tq2kw" event={"ID":"d2f70f9c-2c54-4a03-adbc-af34bf91a63b","Type":"ContainerDied","Data":"cde5eea2c2f4d4d08a7db4cd2520d9e9cd5595d71943c964e9710ac692a66250"} Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.029949 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cde5eea2c2f4d4d08a7db4cd2520d9e9cd5595d71943c964e9710ac692a66250" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.030102 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tq2kw" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.134928 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c99b79967-dmp47"] Dec 11 09:43:40 crc kubenswrapper[4788]: E1211 09:43:40.135564 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" containerName="placement-db-sync" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.135593 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" containerName="placement-db-sync" Dec 11 09:43:40 crc kubenswrapper[4788]: E1211 09:43:40.135627 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0ae66f-f028-4fd2-9617-37f1148ec651" containerName="keystone-bootstrap" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.135636 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0ae66f-f028-4fd2-9617-37f1148ec651" containerName="keystone-bootstrap" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.135828 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0ae66f-f028-4fd2-9617-37f1148ec651" containerName="keystone-bootstrap" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.135849 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" containerName="placement-db-sync" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.136801 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.146043 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.146394 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.146479 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.147637 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.150743 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.152755 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gthls" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.153362 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-564d895c68-qq26m"] Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.155116 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.163693 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.163766 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rkclk" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.163814 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.163931 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.164402 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.168895 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c99b79967-dmp47"] Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.186098 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-564d895c68-qq26m"] Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237365 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-fernet-keys\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237429 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-config-data\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237459 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-combined-ca-bundle\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237496 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-combined-ca-bundle\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237525 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-public-tls-certs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237543 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-credential-keys\") pod \"keystone-c99b79967-dmp47\" (UID: 
\"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237568 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjrkl\" (UniqueName: \"kubernetes.io/projected/6e6612ef-ed63-43eb-a29c-a4dee4798be8-kube-api-access-pjrkl\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237626 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-config-data\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237649 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-scripts\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237682 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k482d\" (UniqueName: \"kubernetes.io/projected/97f22e9d-3a9b-420e-a97d-0421c447bebb-kube-api-access-k482d\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237707 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-public-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237736 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-internal-tls-certs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237900 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-scripts\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.237987 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e6612ef-ed63-43eb-a29c-a4dee4798be8-logs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.238107 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-internal-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340008 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-combined-ca-bundle\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340071 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-public-tls-certs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340095 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-credential-keys\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340122 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjrkl\" (UniqueName: \"kubernetes.io/projected/6e6612ef-ed63-43eb-a29c-a4dee4798be8-kube-api-access-pjrkl\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340188 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-config-data\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340213 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-scripts\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340266 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k482d\" (UniqueName: \"kubernetes.io/projected/97f22e9d-3a9b-420e-a97d-0421c447bebb-kube-api-access-k482d\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340289 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-public-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340317 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-internal-tls-certs\") pod 
\"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340363 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-scripts\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340392 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e6612ef-ed63-43eb-a29c-a4dee4798be8-logs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340436 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-internal-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340486 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-fernet-keys\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340510 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-config-data\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.340530 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-combined-ca-bundle\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.342521 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e6612ef-ed63-43eb-a29c-a4dee4798be8-logs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.345204 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-fernet-keys\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.346006 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-public-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.346401 4788 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-internal-tls-certs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.346842 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-combined-ca-bundle\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.348685 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-combined-ca-bundle\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.349135 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-credential-keys\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.349810 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-public-tls-certs\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.352646 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-internal-tls-certs\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.360920 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-scripts\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.362782 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-scripts\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.364493 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f22e9d-3a9b-420e-a97d-0421c447bebb-config-data\") pod \"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.373995 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k482d\" (UniqueName: \"kubernetes.io/projected/97f22e9d-3a9b-420e-a97d-0421c447bebb-kube-api-access-k482d\") pod 
\"keystone-c99b79967-dmp47\" (UID: \"97f22e9d-3a9b-420e-a97d-0421c447bebb\") " pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.374063 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e6612ef-ed63-43eb-a29c-a4dee4798be8-config-data\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.383069 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjrkl\" (UniqueName: \"kubernetes.io/projected/6e6612ef-ed63-43eb-a29c-a4dee4798be8-kube-api-access-pjrkl\") pod \"placement-564d895c68-qq26m\" (UID: \"6e6612ef-ed63-43eb-a29c-a4dee4798be8\") " pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.488791 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:40 crc kubenswrapper[4788]: I1211 09:43:40.498547 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:41 crc kubenswrapper[4788]: I1211 09:43:41.013740 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-564d895c68-qq26m"] Dec 11 09:43:41 crc kubenswrapper[4788]: I1211 09:43:41.022512 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c99b79967-dmp47"] Dec 11 09:43:41 crc kubenswrapper[4788]: I1211 09:43:41.044012 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"ad77f0874ee20dfeae0afc792c5375955e12c7288049e909275fe93cff1938b9"} Dec 11 09:43:41 crc kubenswrapper[4788]: I1211 09:43:41.045927 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c99b79967-dmp47" event={"ID":"97f22e9d-3a9b-420e-a97d-0421c447bebb","Type":"ContainerStarted","Data":"f11e006352926ba49b978ad9d10f337e6540268360db4c76586a40bfaae1a041"} Dec 11 09:43:41 crc kubenswrapper[4788]: I1211 09:43:41.047331 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-564d895c68-qq26m" event={"ID":"6e6612ef-ed63-43eb-a29c-a4dee4798be8","Type":"ContainerStarted","Data":"a26a938e59213d48117d6af3229516912fa3e7a25779cf1d7252af7198ccaef5"} Dec 11 09:43:42 crc kubenswrapper[4788]: I1211 09:43:42.059079 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"a65157fc4768a00c507ca76b6ff01f976e374647e4082289dd2d6b5d7586c96f"} Dec 11 09:43:43 crc kubenswrapper[4788]: I1211 09:43:43.074209 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-564d895c68-qq26m" event={"ID":"6e6612ef-ed63-43eb-a29c-a4dee4798be8","Type":"ContainerStarted","Data":"0e01e4291132d6d53347efafb1521d850dd557f7e57ef0c771c63a5637b7fdd2"} Dec 11 09:43:43 crc kubenswrapper[4788]: I1211 09:43:43.077186 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c99b79967-dmp47" event={"ID":"97f22e9d-3a9b-420e-a97d-0421c447bebb","Type":"ContainerStarted","Data":"a5df239df301798b6ec74242ae07131ee38b46c44c793ff767317390fe90b7c9"} Dec 11 09:43:43 crc kubenswrapper[4788]: I1211 09:43:43.078475 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:43:43 crc kubenswrapper[4788]: I1211 09:43:43.111682 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-c99b79967-dmp47" podStartSLOduration=3.111656029 podStartE2EDuration="3.111656029s" podCreationTimestamp="2025-12-11 09:43:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:43:43.103862289 +0000 UTC m=+1353.174641875" watchObservedRunningTime="2025-12-11 09:43:43.111656029 +0000 UTC m=+1353.182435615" Dec 11 09:43:45 crc kubenswrapper[4788]: I1211 09:43:45.098087 4788 generic.go:334] "Generic (PLEG): container finished" podID="539632da-1b63-429c-9d5c-3be34a9457ad" containerID="ff2151bc49fd0cafd73c9fb2ceb0468e3d2614a208ebd9fe5817ecee62fe6e97" exitCode=0 Dec 11 09:43:45 crc kubenswrapper[4788]: I1211 09:43:45.098163 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bkcpt" event={"ID":"539632da-1b63-429c-9d5c-3be34a9457ad","Type":"ContainerDied","Data":"ff2151bc49fd0cafd73c9fb2ceb0468e3d2614a208ebd9fe5817ecee62fe6e97"} Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.128799 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bkcpt" event={"ID":"539632da-1b63-429c-9d5c-3be34a9457ad","Type":"ContainerDied","Data":"e7b2684dbbcd7e04da6b8cb4d4e7687d3d5bb449a35302c8f20ec8ee5006849c"} Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.129315 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7b2684dbbcd7e04da6b8cb4d4e7687d3d5bb449a35302c8f20ec8ee5006849c" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.131926 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207069 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207133 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207164 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ggfb\" (UniqueName: \"kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207249 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207280 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.208705 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.207588 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id\") pod \"539632da-1b63-429c-9d5c-3be34a9457ad\" (UID: \"539632da-1b63-429c-9d5c-3be34a9457ad\") " Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.210507 4788 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/539632da-1b63-429c-9d5c-3be34a9457ad-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.215579 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts" (OuterVolumeSpecName: "scripts") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.220530 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.231257 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb" (OuterVolumeSpecName: "kube-api-access-8ggfb") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "kube-api-access-8ggfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.254203 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.269213 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data" (OuterVolumeSpecName: "config-data") pod "539632da-1b63-429c-9d5c-3be34a9457ad" (UID: "539632da-1b63-429c-9d5c-3be34a9457ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.312666 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.312711 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.312723 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ggfb\" (UniqueName: \"kubernetes.io/projected/539632da-1b63-429c-9d5c-3be34a9457ad-kube-api-access-8ggfb\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.312736 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:48 crc kubenswrapper[4788]: I1211 09:43:48.312747 4788 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/539632da-1b63-429c-9d5c-3be34a9457ad-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.139332 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bkcpt" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.459494 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:43:49 crc kubenswrapper[4788]: E1211 09:43:49.460364 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" containerName="cinder-db-sync" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.460389 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" containerName="cinder-db-sync" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.460699 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" containerName="cinder-db-sync" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.461949 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.466221 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.466397 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.466601 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.466833 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-hdstb" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.487300 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.556186 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.560877 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.587969 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641517 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641571 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641622 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641683 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641746 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641811 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641865 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.641921 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lbzl\" (UniqueName: \"kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.642001 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.642092 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prh5h\" (UniqueName: \"kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.642363 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743011 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743355 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743387 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743423 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743455 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743483 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743510 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743536 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lbzl\" (UniqueName: \"kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743564 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743590 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prh5h\" (UniqueName: \"kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.743615 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.744534 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.746399 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.747315 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.748198 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.748948 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.750011 4788 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.751967 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.755761 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.765917 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.780970 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.784624 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.785146 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.789915 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.789984 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lbzl\" (UniqueName: \"kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl\") pod \"cinder-scheduler-0\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.803953 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prh5h\" (UniqueName: \"kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h\") pod \"dnsmasq-dns-9757b87d9-mjnx5\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845464 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845513 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845552 4788 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845694 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845768 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845838 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkhpc\" (UniqueName: \"kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.845861 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947277 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947352 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947402 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkhpc\" (UniqueName: \"kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947426 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947468 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" 
Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947501 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947548 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947649 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.947718 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.951294 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.951878 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.952219 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.956671 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.963543 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.972430 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkhpc\" (UniqueName: \"kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc\") pod \"cinder-api-0\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " pod="openstack/cinder-api-0" Dec 11 09:43:49 crc kubenswrapper[4788]: I1211 09:43:49.988987 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:50 crc kubenswrapper[4788]: E1211 09:43:50.033110 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"ceilometer-notification-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="420d2814-b791-4928-8ebf-691365abfec1" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.105372 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.174146 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-scdh4" event={"ID":"169bf402-16b5-4aa7-838a-094a2e4c3330","Type":"ContainerStarted","Data":"bc7fb4a80917212a2ef2937c19727eb2738a9430480dc85502c129458f85a317"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.218198 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-scdh4" podStartSLOduration=3.799030931 podStartE2EDuration="1m21.218176163s" podCreationTimestamp="2025-12-11 09:42:29 +0000 UTC" firstStartedPulling="2025-12-11 09:42:32.087706998 +0000 UTC m=+1282.158486574" lastFinishedPulling="2025-12-11 09:43:49.50685222 +0000 UTC m=+1359.577631806" observedRunningTime="2025-12-11 09:43:50.203941028 +0000 UTC m=+1360.274720614" watchObservedRunningTime="2025-12-11 09:43:50.218176163 +0000 UTC m=+1360.288955749" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.243218 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerStarted","Data":"72010a1f7a57b545f29717a8fe4161c71457bb11fdbaad6855c991d6da0ab1b2"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.256214 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84d5c869dd-hzg6f" event={"ID":"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251","Type":"ContainerStarted","Data":"1a28cf7f7ddd846f8fc3e5f7f33f353c7474a637ede7f99f287ee5e200699414"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.272863 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"ddbce1a2fa632aa30d843ecf9882eb9d0b3e190db5845729f950f05ae552bad2"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.276112 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-564d895c68-qq26m" event={"ID":"6e6612ef-ed63-43eb-a29c-a4dee4798be8","Type":"ContainerStarted","Data":"2afe49c9f0319e4de84aa21caff405ddd487bbc396a214c67de558c05aa689aa"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.276240 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.277703 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.290189 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerStarted","Data":"177d702a92d665a0cfb6a1fd797f13f2998f383d5a00590af5598e5c09e765e6"} Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.290490 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="sg-core" containerID="cri-o://852e99eecda7b3a4ac5b2061f808910328354db8ad86533ede4fff03cb48ae2a" gracePeriod=30 Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.290944 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.291058 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="proxy-httpd" containerID="cri-o://177d702a92d665a0cfb6a1fd797f13f2998f383d5a00590af5598e5c09e765e6" gracePeriod=30 Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.348831 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-564d895c68-qq26m" podStartSLOduration=10.348810114 podStartE2EDuration="10.348810114s" podCreationTimestamp="2025-12-11 09:43:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:43:50.309756435 +0000 UTC m=+1360.380536021" watchObservedRunningTime="2025-12-11 09:43:50.348810114 +0000 UTC m=+1360.419589700" Dec 11 09:43:50 crc kubenswrapper[4788]: W1211 09:43:50.625917 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31f9d31b_6677_4e20_b6d5_ebd5d6467220.slice/crio-1e069a940a09bcc597eb64171c14a2cc00589cb58388979ac04b35d0bce46cad WatchSource:0}: Error finding container 1e069a940a09bcc597eb64171c14a2cc00589cb58388979ac04b35d0bce46cad: Status 404 returned error can't find the container with id 1e069a940a09bcc597eb64171c14a2cc00589cb58388979ac04b35d0bce46cad Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.662117 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.751989 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:43:50 crc kubenswrapper[4788]: I1211 09:43:50.816374 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.322440 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerStarted","Data":"1639642baf74b2716e381bba0e64b2517d8c67a0998e8c81296f78c615d937a7"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.341081 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84d5c869dd-hzg6f" event={"ID":"3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251","Type":"ContainerStarted","Data":"b37d514fa4deb84c73ca2c48adfff7df65a3ddc1fdf8ff694495c3bf90ec2f42"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.367516 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"f6351d4c9c6d73ea1e0485c84e7a488305163b7125c4572f8fd1b1544892cebe"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 
09:43:51.368692 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.368722 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.374501 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7db769bcbd-mnjsv" podStartSLOduration=4.786958929 podStartE2EDuration="1m13.374480116s" podCreationTimestamp="2025-12-11 09:42:38 +0000 UTC" firstStartedPulling="2025-12-11 09:42:40.951635729 +0000 UTC m=+1291.022415315" lastFinishedPulling="2025-12-11 09:43:49.539156916 +0000 UTC m=+1359.609936502" observedRunningTime="2025-12-11 09:43:51.359312198 +0000 UTC m=+1361.430091794" watchObservedRunningTime="2025-12-11 09:43:51.374480116 +0000 UTC m=+1361.445259702" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.399868 4788 generic.go:334] "Generic (PLEG): container finished" podID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerID="4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6" exitCode=0 Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.399933 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" event={"ID":"31f9d31b-6677-4e20-b6d5-ebd5d6467220","Type":"ContainerDied","Data":"4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.399960 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" event={"ID":"31f9d31b-6677-4e20-b6d5-ebd5d6467220","Type":"ContainerStarted","Data":"1e069a940a09bcc597eb64171c14a2cc00589cb58388979ac04b35d0bce46cad"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.400192 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-84d5c869dd-hzg6f" podStartSLOduration=3.679040332 podStartE2EDuration="1m13.400174133s" podCreationTimestamp="2025-12-11 09:42:38 +0000 UTC" firstStartedPulling="2025-12-11 09:42:39.769059793 +0000 UTC m=+1289.839839379" lastFinishedPulling="2025-12-11 09:43:49.490193594 +0000 UTC m=+1359.560973180" observedRunningTime="2025-12-11 09:43:51.395451892 +0000 UTC m=+1361.466231488" watchObservedRunningTime="2025-12-11 09:43:51.400174133 +0000 UTC m=+1361.470953719" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.456547 4788 generic.go:334] "Generic (PLEG): container finished" podID="420d2814-b791-4928-8ebf-691365abfec1" containerID="177d702a92d665a0cfb6a1fd797f13f2998f383d5a00590af5598e5c09e765e6" exitCode=0 Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.456579 4788 generic.go:334] "Generic (PLEG): container finished" podID="420d2814-b791-4928-8ebf-691365abfec1" containerID="852e99eecda7b3a4ac5b2061f808910328354db8ad86533ede4fff03cb48ae2a" exitCode=2 Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.456629 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerDied","Data":"177d702a92d665a0cfb6a1fd797f13f2998f383d5a00590af5598e5c09e765e6"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.456654 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerDied","Data":"852e99eecda7b3a4ac5b2061f808910328354db8ad86533ede4fff03cb48ae2a"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.481412 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerStarted","Data":"7d3350dac47aaeb1c1475770e8968514ae9b3f5ebd0fc5783490b6ab9c6e811b"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.493735 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerStarted","Data":"d008348452ead246c7a503e879976395513928030c5572facb6ddf25d4472453"} Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.578839 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.696851 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697472 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697510 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697545 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697620 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4sxx\" (UniqueName: \"kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697664 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697693 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd\") pod \"420d2814-b791-4928-8ebf-691365abfec1\" (UID: \"420d2814-b791-4928-8ebf-691365abfec1\") " Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.697865 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.698806 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.699169 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.699211 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/420d2814-b791-4928-8ebf-691365abfec1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.708608 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts" (OuterVolumeSpecName: "scripts") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.717346 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx" (OuterVolumeSpecName: "kube-api-access-h4sxx") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "kube-api-access-h4sxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.750476 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.755975 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.800818 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.801038 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.801049 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4sxx\" (UniqueName: \"kubernetes.io/projected/420d2814-b791-4928-8ebf-691365abfec1-kube-api-access-h4sxx\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.801060 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.824625 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data" (OuterVolumeSpecName: "config-data") pod "420d2814-b791-4928-8ebf-691365abfec1" (UID: "420d2814-b791-4928-8ebf-691365abfec1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:43:51 crc kubenswrapper[4788]: I1211 09:43:51.903921 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/420d2814-b791-4928-8ebf-691365abfec1-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.045402 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.515387 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"420d2814-b791-4928-8ebf-691365abfec1","Type":"ContainerDied","Data":"61c4d1b873be3019be6f1f49d97ed66aaa12439bdbf6606fa672ecd3dba01915"} Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.515728 4788 scope.go:117] "RemoveContainer" containerID="177d702a92d665a0cfb6a1fd797f13f2998f383d5a00590af5598e5c09e765e6" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.515460 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.515486 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.548104 4788 scope.go:117] "RemoveContainer" containerID="852e99eecda7b3a4ac5b2061f808910328354db8ad86533ede4fff03cb48ae2a" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.645396 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.655673 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.664145 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:43:52 crc kubenswrapper[4788]: E1211 09:43:52.664664 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="sg-core" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.664687 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="sg-core" Dec 11 09:43:52 crc kubenswrapper[4788]: E1211 09:43:52.664738 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="proxy-httpd" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.664748 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="proxy-httpd" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.664979 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="sg-core" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.665004 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="420d2814-b791-4928-8ebf-691365abfec1" containerName="proxy-httpd" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.667062 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.672581 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.672581 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.684698 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.826926 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827009 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9wb7\" (UniqueName: \"kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827045 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827079 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827105 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827167 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.827278 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.929099 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9wb7\" (UniqueName: \"kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: 
I1211 09:43:52.929177 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.929256 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.929331 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.929800 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.929892 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.930002 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.930677 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.931575 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.937194 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.937495 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.943348 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.952625 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.953218 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9wb7\" (UniqueName: \"kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7\") pod \"ceilometer-0\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " pod="openstack/ceilometer-0" Dec 11 09:43:52 crc kubenswrapper[4788]: I1211 09:43:52.991463 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:43:54 crc kubenswrapper[4788]: I1211 09:43:54.117499 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:43:54 crc kubenswrapper[4788]: W1211 09:43:54.121862 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe6f1eec_5386_445d_8700_eb6af297dc28.slice/crio-3364c749c08f74544ea90fdc9cc2d4232f817908a9e16f3982d578603b9b5600 WatchSource:0}: Error finding container 3364c749c08f74544ea90fdc9cc2d4232f817908a9e16f3982d578603b9b5600: Status 404 returned error can't find the container with id 3364c749c08f74544ea90fdc9cc2d4232f817908a9e16f3982d578603b9b5600 Dec 11 09:43:54 crc kubenswrapper[4788]: I1211 09:43:54.510525 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="420d2814-b791-4928-8ebf-691365abfec1" path="/var/lib/kubelet/pods/420d2814-b791-4928-8ebf-691365abfec1/volumes" Dec 11 09:43:54 crc kubenswrapper[4788]: I1211 09:43:54.539010 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerStarted","Data":"3364c749c08f74544ea90fdc9cc2d4232f817908a9e16f3982d578603b9b5600"} Dec 11 09:43:57 crc kubenswrapper[4788]: I1211 09:43:57.568355 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" event={"ID":"31f9d31b-6677-4e20-b6d5-ebd5d6467220","Type":"ContainerStarted","Data":"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866"} Dec 11 09:43:57 crc kubenswrapper[4788]: I1211 09:43:57.572276 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerStarted","Data":"66df2455f6a399de3f08617c1be7b5ece192df9d629fc5aac6ef46336fbb67f1"} Dec 11 09:43:58 crc kubenswrapper[4788]: I1211 09:43:58.579952 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:43:58 crc kubenswrapper[4788]: I1211 09:43:58.606085 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" podStartSLOduration=9.606068054 podStartE2EDuration="9.606068054s" podCreationTimestamp="2025-12-11 09:43:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-11 09:43:58.597975446 +0000 UTC m=+1368.668755052" watchObservedRunningTime="2025-12-11 09:43:58.606068054 +0000 UTC m=+1368.676847640" Dec 11 09:43:59 crc kubenswrapper[4788]: I1211 09:43:59.292220 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:43:59 crc kubenswrapper[4788]: I1211 09:43:59.292707 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:43:59 crc kubenswrapper[4788]: I1211 09:43:59.969049 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:44:00 crc kubenswrapper[4788]: I1211 09:44:00.404978 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:44:00 crc kubenswrapper[4788]: I1211 09:44:00.405126 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:44:00 crc kubenswrapper[4788]: I1211 09:44:00.408104 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Dec 11 09:44:00 crc kubenswrapper[4788]: I1211 09:44:00.631929 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"c4271d3eaef3ebc406715e0ded57c6ea66eb27c9d6fdcc05245a4f4b7da4bad7"} Dec 11 09:44:00 crc kubenswrapper[4788]: I1211 09:44:00.635321 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerStarted","Data":"a5d62c3842c9a43047429c4ed55efc52c6fee3e7658747fec25160c2bd89cc3a"} Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.665521 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-564d895c68-qq26m" Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.671994 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerStarted","Data":"d6d56553bff5cb3c9407e9f4faa59907e6c863edd3b016168a4d3db8354cef48"} Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.715565 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"31051c13dad6228426542b5151d1049d9a2963e10cd0dfb665cfb69f813309c8"} Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.722291 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerStarted","Data":"3ab4d8a37a6640fdbef87ecf806b719ae4db92ef4aa4c3f637a9272b668d1ba8"} Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.722586 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api-log" containerID="cri-o://66df2455f6a399de3f08617c1be7b5ece192df9d629fc5aac6ef46336fbb67f1" gracePeriod=30 Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.722898 4788 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.723167 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" containerID="cri-o://3ab4d8a37a6640fdbef87ecf806b719ae4db92ef4aa4c3f637a9272b668d1ba8" gracePeriod=30 Dec 11 09:44:01 crc kubenswrapper[4788]: I1211 09:44:01.771775 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=12.771756646 podStartE2EDuration="12.771756646s" podCreationTimestamp="2025-12-11 09:43:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:01.74915094 +0000 UTC m=+1371.819930546" watchObservedRunningTime="2025-12-11 09:44:01.771756646 +0000 UTC m=+1371.842536232" Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.739465 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"a13d808e0173d17f275173703c818e0b390ef9c3d259f004c336f210249d5c6a"} Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.743151 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerStarted","Data":"83f0a2be254451d17a9dd1b00b4a7f9898cb8f2c65b2b75d35540819837a6849"} Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.762843 4788 generic.go:334] "Generic (PLEG): container finished" podID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerID="66df2455f6a399de3f08617c1be7b5ece192df9d629fc5aac6ef46336fbb67f1" exitCode=143 Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.762936 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerDied","Data":"66df2455f6a399de3f08617c1be7b5ece192df9d629fc5aac6ef46336fbb67f1"} Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.771196 4788 generic.go:334] "Generic (PLEG): container finished" podID="ed639405-5b9d-491f-b673-3d0657a14ecf" containerID="99d7855546af3787a9a2209a97ca65d78e41ec27d469523f594380049193aa2a" exitCode=0 Dec 11 09:44:02 crc kubenswrapper[4788]: I1211 09:44:02.771507 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8k66n" event={"ID":"ed639405-5b9d-491f-b673-3d0657a14ecf","Type":"ContainerDied","Data":"99d7855546af3787a9a2209a97ca65d78e41ec27d469523f594380049193aa2a"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.792259 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerStarted","Data":"99543d6c3a77fd0ecc62fb13a4a683462c46fc9d0453d2bf1fddc07a1f282ef1"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.801337 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"a8e1195200db526e282fc0da6ca05e2dde62e7b520a9c21e7450a9901d2d0855"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.801389 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"d382ce19ad298d86df3bac0e025166ad3f5d8e045bcddde36a6c4ad383e4c16b"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.801401 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"4b43c7f90208e45bac32f9b5684a319b1533753b1b8a87862fe4631e94d6d3e1"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.801414 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"874f552a-7856-439c-937c-a87d9c15305c","Type":"ContainerStarted","Data":"bca67376c857a2510af9143f438e2b666424fb21817f0c7688586f220c314fad"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.805165 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerStarted","Data":"b7812425c7ced758d00e2282c878e846212cd1bb708712d18f9d65e00e83f43c"} Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.814191 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.359722083 podStartE2EDuration="14.814168704s" podCreationTimestamp="2025-12-11 09:43:49 +0000 UTC" firstStartedPulling="2025-12-11 09:43:50.76507882 +0000 UTC m=+1360.835858416" lastFinishedPulling="2025-12-11 09:44:00.219525451 +0000 UTC m=+1370.290305037" observedRunningTime="2025-12-11 09:44:03.811137784 +0000 UTC m=+1373.881917370" watchObservedRunningTime="2025-12-11 09:44:03.814168704 +0000 UTC m=+1373.884948300" Dec 11 09:44:03 crc kubenswrapper[4788]: I1211 09:44:03.861586 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=147.339418076 podStartE2EDuration="2m54.861566526s" podCreationTimestamp="2025-12-11 09:41:09 +0000 UTC" firstStartedPulling="2025-12-11 09:43:32.60521129 +0000 UTC m=+1342.675990866" lastFinishedPulling="2025-12-11 09:44:00.12735974 +0000 UTC m=+1370.198139316" observedRunningTime="2025-12-11 09:44:03.853193451 +0000 UTC m=+1373.923973047" watchObservedRunningTime="2025-12-11 09:44:03.861566526 +0000 UTC m=+1373.932346112" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.164083 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.164303 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerName="dnsmasq-dns" containerID="cri-o://ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866" gracePeriod=10 Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.171433 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.257339 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.259738 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.272627 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.274265 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.426678 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.426957 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.426980 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.426998 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.427284 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv45v\" (UniqueName: \"kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.427342 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.528889 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.528934 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: 
\"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.529019 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv45v\" (UniqueName: \"kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.529040 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.529098 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.529129 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.530124 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.530732 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.531268 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.532109 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.532660 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 
09:44:04.569211 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv45v\" (UniqueName: \"kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v\") pod \"dnsmasq-dns-98f5744cf-z7rhr\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.666446 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.679606 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8k66n" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.691712 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835177 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2vdj\" (UniqueName: \"kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj\") pod \"ed639405-5b9d-491f-b673-3d0657a14ecf\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835295 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb\") pod \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835426 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle\") pod \"ed639405-5b9d-491f-b673-3d0657a14ecf\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835494 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data\") pod \"ed639405-5b9d-491f-b673-3d0657a14ecf\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835566 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data\") pod \"ed639405-5b9d-491f-b673-3d0657a14ecf\" (UID: \"ed639405-5b9d-491f-b673-3d0657a14ecf\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835592 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prh5h\" (UniqueName: \"kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h\") pod \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835665 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc\") pod \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835686 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config\") pod \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.835706 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb\") pod \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\" (UID: \"31f9d31b-6677-4e20-b6d5-ebd5d6467220\") " Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.857666 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj" (OuterVolumeSpecName: "kube-api-access-r2vdj") pod "ed639405-5b9d-491f-b673-3d0657a14ecf" (UID: "ed639405-5b9d-491f-b673-3d0657a14ecf"). InnerVolumeSpecName "kube-api-access-r2vdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.865506 4788 generic.go:334] "Generic (PLEG): container finished" podID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerID="ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866" exitCode=0 Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.865579 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" event={"ID":"31f9d31b-6677-4e20-b6d5-ebd5d6467220","Type":"ContainerDied","Data":"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866"} Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.865614 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" event={"ID":"31f9d31b-6677-4e20-b6d5-ebd5d6467220","Type":"ContainerDied","Data":"1e069a940a09bcc597eb64171c14a2cc00589cb58388979ac04b35d0bce46cad"} Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.865638 4788 scope.go:117] "RemoveContainer" containerID="ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.865779 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9757b87d9-mjnx5" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.876500 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ed639405-5b9d-491f-b673-3d0657a14ecf" (UID: "ed639405-5b9d-491f-b673-3d0657a14ecf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.883499 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h" (OuterVolumeSpecName: "kube-api-access-prh5h") pod "31f9d31b-6677-4e20-b6d5-ebd5d6467220" (UID: "31f9d31b-6677-4e20-b6d5-ebd5d6467220"). InnerVolumeSpecName "kube-api-access-prh5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.884101 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8k66n" event={"ID":"ed639405-5b9d-491f-b673-3d0657a14ecf","Type":"ContainerDied","Data":"2ab9347f556f4c02219d258549c26c0e09b6eae66bea2f818c078f93ccc9baf6"} Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.887212 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ab9347f556f4c02219d258549c26c0e09b6eae66bea2f818c078f93ccc9baf6" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.884218 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8k66n" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.920529 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed639405-5b9d-491f-b673-3d0657a14ecf" (UID: "ed639405-5b9d-491f-b673-3d0657a14ecf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.937849 4788 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.937881 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prh5h\" (UniqueName: \"kubernetes.io/projected/31f9d31b-6677-4e20-b6d5-ebd5d6467220-kube-api-access-prh5h\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.937896 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2vdj\" (UniqueName: \"kubernetes.io/projected/ed639405-5b9d-491f-b673-3d0657a14ecf-kube-api-access-r2vdj\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.937908 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.960401 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.964093 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "31f9d31b-6677-4e20-b6d5-ebd5d6467220" (UID: "31f9d31b-6677-4e20-b6d5-ebd5d6467220"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.975880 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config" (OuterVolumeSpecName: "config") pod "31f9d31b-6677-4e20-b6d5-ebd5d6467220" (UID: "31f9d31b-6677-4e20-b6d5-ebd5d6467220"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:04 crc kubenswrapper[4788]: I1211 09:44:04.989812 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data" (OuterVolumeSpecName: "config-data") pod "ed639405-5b9d-491f-b673-3d0657a14ecf" (UID: "ed639405-5b9d-491f-b673-3d0657a14ecf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.004834 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "31f9d31b-6677-4e20-b6d5-ebd5d6467220" (UID: "31f9d31b-6677-4e20-b6d5-ebd5d6467220"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.014917 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "31f9d31b-6677-4e20-b6d5-ebd5d6467220" (UID: "31f9d31b-6677-4e20-b6d5-ebd5d6467220"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.040543 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.040585 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.040596 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.040608 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/31f9d31b-6677-4e20-b6d5-ebd5d6467220-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.040618 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed639405-5b9d-491f-b673-3d0657a14ecf-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.091379 4788 scope.go:117] "RemoveContainer" containerID="4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.125443 4788 scope.go:117] "RemoveContainer" containerID="ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866" Dec 11 09:44:05 crc kubenswrapper[4788]: E1211 09:44:05.129403 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866\": container with ID starting with ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866 not found: ID does not exist" containerID="ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.129496 4788 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866"} err="failed to get container status \"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866\": rpc error: code = NotFound desc = could not find container \"ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866\": container with ID starting with ff7dd32d3111478c22f81ebda14c0742a2670e4afd9527d4bd9518c79457c866 not found: ID does not exist" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.129532 4788 scope.go:117] "RemoveContainer" containerID="4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6" Dec 11 09:44:05 crc kubenswrapper[4788]: E1211 09:44:05.133393 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6\": container with ID starting with 4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6 not found: ID does not exist" containerID="4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.133449 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6"} err="failed to get container status \"4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6\": rpc error: code = NotFound desc = could not find container \"4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6\": container with ID starting with 4d0703127bcdf686ca6d9b249d04fe2244d11d603aa85743c924f8dff0ae02e6 not found: ID does not exist" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.217118 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.263304 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.278938 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9757b87d9-mjnx5"] Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.291527 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:05 crc kubenswrapper[4788]: E1211 09:44:05.292029 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" containerName="glance-db-sync" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.292049 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" containerName="glance-db-sync" Dec 11 09:44:05 crc kubenswrapper[4788]: E1211 09:44:05.292069 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerName="init" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.292078 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerName="init" Dec 11 09:44:05 crc kubenswrapper[4788]: E1211 09:44:05.292116 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerName="dnsmasq-dns" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.292124 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" 
containerName="dnsmasq-dns" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.292407 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" containerName="glance-db-sync" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.292439 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" containerName="dnsmasq-dns" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.293722 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.300894 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.342366 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447210 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlqfx\" (UniqueName: \"kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447537 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447568 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447625 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447656 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.447677 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549679 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlqfx\" (UniqueName: 
\"kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549744 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549780 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549852 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549918 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.549939 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.550827 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.550827 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.550903 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.551035 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: 
\"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.551092 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.570994 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlqfx\" (UniqueName: \"kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx\") pod \"dnsmasq-dns-5d6bd97c5-8pgqz\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.616471 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.905657 4788 generic.go:334] "Generic (PLEG): container finished" podID="6e233b2b-c8d6-42c5-a8d1-79be67787ff8" containerID="d7c778836190e3e738790e3fb785fb832890c76510c415cada3ac4b74eb04325" exitCode=0 Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.906029 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" event={"ID":"6e233b2b-c8d6-42c5-a8d1-79be67787ff8","Type":"ContainerDied","Data":"d7c778836190e3e738790e3fb785fb832890c76510c415cada3ac4b74eb04325"} Dec 11 09:44:05 crc kubenswrapper[4788]: I1211 09:44:05.906269 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" event={"ID":"6e233b2b-c8d6-42c5-a8d1-79be67787ff8","Type":"ContainerStarted","Data":"f89195c14bc416e917a8ad7caf1800cfb5d9ecec9b129ebb83d81759fc697169"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.140579 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.215789 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.220069 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.222934 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-49zhk" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.223206 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.223690 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.228401 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.267675 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.320304 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:06 crc kubenswrapper[4788]: E1211 09:44:06.320829 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e233b2b-c8d6-42c5-a8d1-79be67787ff8" containerName="init" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.320851 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e233b2b-c8d6-42c5-a8d1-79be67787ff8" containerName="init" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.321014 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e233b2b-c8d6-42c5-a8d1-79be67787ff8" containerName="init" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.321990 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.330431 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.338731 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.374925 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.375042 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.375075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.375453 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.377052 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.377097 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv45v\" (UniqueName: \"kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v\") pod \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\" (UID: \"6e233b2b-c8d6-42c5-a8d1-79be67787ff8\") " Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.377961 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378017 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378073 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378125 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46dh2\" (UniqueName: \"kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378176 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378428 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.378480 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.383995 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v" (OuterVolumeSpecName: "kube-api-access-rv45v") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "kube-api-access-rv45v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.411076 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config" (OuterVolumeSpecName: "config") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.425765 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.434137 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.440762 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.440875 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6e233b2b-c8d6-42c5-a8d1-79be67787ff8" (UID: "6e233b2b-c8d6-42c5-a8d1-79be67787ff8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480153 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480208 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480262 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480298 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480314 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480336 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480354 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480402 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480428 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46dh2\" (UniqueName: \"kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480455 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480483 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480503 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480517 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqv4l\" (UniqueName: \"kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480547 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.480638 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481019 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481075 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481287 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481325 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481337 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481349 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv45v\" (UniqueName: \"kubernetes.io/projected/6e233b2b-c8d6-42c5-a8d1-79be67787ff8-kube-api-access-rv45v\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481397 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.481533 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.485207 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.485761 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.489044 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.501572 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46dh2\" (UniqueName: \"kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.504985 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31f9d31b-6677-4e20-b6d5-ebd5d6467220" path="/var/lib/kubelet/pods/31f9d31b-6677-4e20-b6d5-ebd5d6467220/volumes" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.514465 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.562169 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583194 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583285 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583311 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583362 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583384 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqv4l\" (UniqueName: 
\"kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583397 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583429 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.583929 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.584256 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.584878 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.587245 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.587675 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.601281 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.606271 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqv4l\" (UniqueName: \"kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l\") pod \"glance-default-internal-api-0\" (UID: 
\"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.636348 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.684901 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.925862 4788 generic.go:334] "Generic (PLEG): container finished" podID="169bf402-16b5-4aa7-838a-094a2e4c3330" containerID="bc7fb4a80917212a2ef2937c19727eb2738a9430480dc85502c129458f85a317" exitCode=0 Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.925954 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-scdh4" event={"ID":"169bf402-16b5-4aa7-838a-094a2e4c3330","Type":"ContainerDied","Data":"bc7fb4a80917212a2ef2937c19727eb2738a9430480dc85502c129458f85a317"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.928196 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.928202 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98f5744cf-z7rhr" event={"ID":"6e233b2b-c8d6-42c5-a8d1-79be67787ff8","Type":"ContainerDied","Data":"f89195c14bc416e917a8ad7caf1800cfb5d9ecec9b129ebb83d81759fc697169"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.928452 4788 scope.go:117] "RemoveContainer" containerID="d7c778836190e3e738790e3fb785fb832890c76510c415cada3ac4b74eb04325" Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.933616 4788 generic.go:334] "Generic (PLEG): container finished" podID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerID="3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4" exitCode=0 Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.933682 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" event={"ID":"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7","Type":"ContainerDied","Data":"3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.933746 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" event={"ID":"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7","Type":"ContainerStarted","Data":"50dc21387e2b3e5266f24d1f8aeee5a49b0e8f3f590b4ab7f9b306f67101f971"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.937070 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerStarted","Data":"b57dc0472552391b3471ba9906bac0d7704efbb94a66e456534a331afbacfb67"} Dec 11 09:44:06 crc kubenswrapper[4788]: I1211 09:44:06.938029 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.001178 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.1646260330000002 podStartE2EDuration="15.001159812s" podCreationTimestamp="2025-12-11 09:43:52 +0000 UTC" firstStartedPulling="2025-12-11 
09:43:54.124251877 +0000 UTC m=+1364.195031463" lastFinishedPulling="2025-12-11 09:44:05.960785656 +0000 UTC m=+1376.031565242" observedRunningTime="2025-12-11 09:44:06.997738502 +0000 UTC m=+1377.068518078" watchObservedRunningTime="2025-12-11 09:44:07.001159812 +0000 UTC m=+1377.071939388" Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.061215 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.075079 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98f5744cf-z7rhr"] Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.253306 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:07 crc kubenswrapper[4788]: W1211 09:44:07.257618 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89743535_ac96_4adc_ba59_a586e71dd880.slice/crio-7cf45c056939f4e0ebf779a5d75876da21574f0d5cf96068547be4ae8da3a8d3 WatchSource:0}: Error finding container 7cf45c056939f4e0ebf779a5d75876da21574f0d5cf96068547be4ae8da3a8d3: Status 404 returned error can't find the container with id 7cf45c056939f4e0ebf779a5d75876da21574f0d5cf96068547be4ae8da3a8d3 Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.386247 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.968180 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" event={"ID":"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7","Type":"ContainerStarted","Data":"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490"} Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.968498 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:07 crc kubenswrapper[4788]: I1211 09:44:07.985497 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerStarted","Data":"7cf45c056939f4e0ebf779a5d75876da21574f0d5cf96068547be4ae8da3a8d3"} Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.001505 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerStarted","Data":"9352bab09b1685321ceb939202af7efee1047205485b3f998c4337074f9a1ddf"} Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.022446 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" podStartSLOduration=3.022426012 podStartE2EDuration="3.022426012s" podCreationTimestamp="2025-12-11 09:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:08.017053067 +0000 UTC m=+1378.087832653" watchObservedRunningTime="2025-12-11 09:44:08.022426012 +0000 UTC m=+1378.093205598" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.395552 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-scdh4" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.506653 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e233b2b-c8d6-42c5-a8d1-79be67787ff8" path="/var/lib/kubelet/pods/6e233b2b-c8d6-42c5-a8d1-79be67787ff8/volumes" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.541422 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brn4t\" (UniqueName: \"kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t\") pod \"169bf402-16b5-4aa7-838a-094a2e4c3330\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.541473 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle\") pod \"169bf402-16b5-4aa7-838a-094a2e4c3330\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.541589 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data\") pod \"169bf402-16b5-4aa7-838a-094a2e4c3330\" (UID: \"169bf402-16b5-4aa7-838a-094a2e4c3330\") " Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.549453 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t" (OuterVolumeSpecName: "kube-api-access-brn4t") pod "169bf402-16b5-4aa7-838a-094a2e4c3330" (UID: "169bf402-16b5-4aa7-838a-094a2e4c3330"). InnerVolumeSpecName "kube-api-access-brn4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.554661 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "169bf402-16b5-4aa7-838a-094a2e4c3330" (UID: "169bf402-16b5-4aa7-838a-094a2e4c3330"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.578091 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "169bf402-16b5-4aa7-838a-094a2e4c3330" (UID: "169bf402-16b5-4aa7-838a-094a2e4c3330"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.646266 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brn4t\" (UniqueName: \"kubernetes.io/projected/169bf402-16b5-4aa7-838a-094a2e4c3330-kube-api-access-brn4t\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.646306 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:08 crc kubenswrapper[4788]: I1211 09:44:08.646320 4788 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/169bf402-16b5-4aa7-838a-094a2e4c3330-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.080209 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-scdh4" event={"ID":"169bf402-16b5-4aa7-838a-094a2e4c3330","Type":"ContainerDied","Data":"599ac891ed3601e138d834c2490fcfc6f0dd26709b33b4b64cf531cdb7d0fd89"} Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.080537 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="599ac891ed3601e138d834c2490fcfc6f0dd26709b33b4b64cf531cdb7d0fd89" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.080606 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-scdh4" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.091318 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerStarted","Data":"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989"} Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.095154 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerStarted","Data":"4b62b5404951a3b5a30ca64005c0d94a9af131982879e921a5c9fb63ef410775"} Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.194133 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-65644684d5-hmnmb"] Dec 11 09:44:09 crc kubenswrapper[4788]: E1211 09:44:09.194689 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" containerName="barbican-db-sync" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.194707 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" containerName="barbican-db-sync" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.194926 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" containerName="barbican-db-sync" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.196044 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.204188 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.210050 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-j88gj" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.210336 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.221996 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65644684d5-hmnmb"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.282410 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.282679 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-logs\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.282802 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data-custom\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.282916 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b4xq\" (UniqueName: \"kubernetes.io/projected/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-kube-api-access-2b4xq\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.283042 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-combined-ca-bundle\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.292297 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6fd98b95d6-wxbsw"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.294019 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.299057 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.302206 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6fd98b95d6-wxbsw"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.304640 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84d5c869dd-hzg6f" podUID="3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.339867 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.392260 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.392436 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.393091 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.393737 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data-custom\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.393765 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e2739c9-c97e-4807-bde7-172073652810-logs\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.393921 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.393961 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-logs\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.394181 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data-custom\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.394385 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-combined-ca-bundle\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.394607 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b4xq\" (UniqueName: \"kubernetes.io/projected/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-kube-api-access-2b4xq\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.394970 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-combined-ca-bundle\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.395182 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9vvc\" (UniqueName: \"kubernetes.io/projected/5e2739c9-c97e-4807-bde7-172073652810-kube-api-access-s9vvc\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.397063 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-logs\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.404218 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data-custom\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.409421 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-combined-ca-bundle\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.426140 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-config-data\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.439105 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.439864 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b4xq\" (UniqueName: \"kubernetes.io/projected/3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7-kube-api-access-2b4xq\") pod \"barbican-worker-65644684d5-hmnmb\" (UID: \"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7\") " pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.497512 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.497835 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.498003 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data-custom\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.498123 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e2739c9-c97e-4807-bde7-172073652810-logs\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.498215 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.498838 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxttz\" (UniqueName: \"kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: 
\"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.498963 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-combined-ca-bundle\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.499065 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.499160 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.499313 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.499505 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9vvc\" (UniqueName: \"kubernetes.io/projected/5e2739c9-c97e-4807-bde7-172073652810-kube-api-access-s9vvc\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.502964 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-combined-ca-bundle\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.505338 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e2739c9-c97e-4807-bde7-172073652810-logs\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.507488 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data-custom\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.507554 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5e2739c9-c97e-4807-bde7-172073652810-config-data\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.519308 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.521344 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.532901 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.538741 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9vvc\" (UniqueName: \"kubernetes.io/projected/5e2739c9-c97e-4807-bde7-172073652810-kube-api-access-s9vvc\") pod \"barbican-keystone-listener-6fd98b95d6-wxbsw\" (UID: \"5e2739c9-c97e-4807-bde7-172073652810\") " pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.544788 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.561225 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-65644684d5-hmnmb" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.602834 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.603002 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.603207 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.603350 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxttz\" (UniqueName: \"kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.603446 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.603509 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.604015 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.605071 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.607692 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.608875 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.612928 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.621186 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.631054 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxttz\" (UniqueName: \"kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz\") pod \"dnsmasq-dns-5cc8b5d5c5-fzl5b\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.706488 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.706545 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjspj\" (UniqueName: \"kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.706656 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.706732 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.706838 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.758363 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.810679 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.810749 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjspj\" (UniqueName: \"kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.810870 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.810909 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.810990 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.811545 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.819596 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.819904 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.820187 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.843735 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjspj\" (UniqueName: \"kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj\") pod \"barbican-api-8495b6dc44-4jlzn\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.859406 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.920347 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:09 crc kubenswrapper[4788]: I1211 09:44:09.930100 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.139615 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerStarted","Data":"4bf9ee208fa3b8513c65a05834413c0a2ac2c85d4feb9c124cc19f37eaf3b6f1"} Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.154783 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="dnsmasq-dns" containerID="cri-o://532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490" gracePeriod=10 Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.155244 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerStarted","Data":"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e"} Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.172561 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-65644684d5-hmnmb"] Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.187000 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.186980151 podStartE2EDuration="5.186980151s" podCreationTimestamp="2025-12-11 09:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:10.164976569 +0000 UTC m=+1380.235756175" watchObservedRunningTime="2025-12-11 09:44:10.186980151 +0000 UTC m=+1380.257759737" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.189158 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.205943 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.205923231 podStartE2EDuration="5.205923231s" podCreationTimestamp="2025-12-11 09:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:10.204624991 +0000 UTC m=+1380.275404577" watchObservedRunningTime="2025-12-11 09:44:10.205923231 +0000 UTC m=+1380.276702817" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.335856 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.370184 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6fd98b95d6-wxbsw"] Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.407364 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.418355 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.608427 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.616094 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.759396 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.848679 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlqfx\" (UniqueName: \"kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.849005 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.849024 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.849055 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.849189 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.849259 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.859950 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx" (OuterVolumeSpecName: "kube-api-access-mlqfx") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "kube-api-access-mlqfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.943057 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config" (OuterVolumeSpecName: "config") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.948344 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.950558 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.951159 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") pod \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\" (UID: \"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7\") " Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.951699 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlqfx\" (UniqueName: \"kubernetes.io/projected/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-kube-api-access-mlqfx\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.951718 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.951729 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:10 crc kubenswrapper[4788]: W1211 09:44:10.951798 4788 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7/volumes/kubernetes.io~configmap/ovsdbserver-nb Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.951809 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:10 crc kubenswrapper[4788]: I1211 09:44:10.981916 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.020214 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" (UID: "b65bcdfb-50fa-404f-a9cc-0511b1e9dce7"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.053613 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.053667 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.053680 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.181466 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" event={"ID":"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652","Type":"ContainerStarted","Data":"c2d2011886acacf667886365914d097fd3dd1f13f5c8d7d84e70203dcb51ec3f"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.182989 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerStarted","Data":"91a20a837a2bdec6ca4ea9905969497dbce057cb58e473cbfcbd478ea092b75d"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.185289 4788 generic.go:334] "Generic (PLEG): container finished" podID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerID="532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490" exitCode=0 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.185357 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" event={"ID":"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7","Type":"ContainerDied","Data":"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.185380 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" event={"ID":"b65bcdfb-50fa-404f-a9cc-0511b1e9dce7","Type":"ContainerDied","Data":"50dc21387e2b3e5266f24d1f8aeee5a49b0e8f3f590b4ab7f9b306f67101f971"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.185381 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-8pgqz" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.185397 4788 scope.go:117] "RemoveContainer" containerID="532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.191348 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" event={"ID":"5e2739c9-c97e-4807-bde7-172073652810","Type":"ContainerStarted","Data":"2b290f8eb4ca80fe1d1aad2ad96218f0d73c6a59ea4d0ba094cba211c9e281f4"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.195584 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-log" containerID="cri-o://9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.195693 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-httpd" containerID="cri-o://560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.195815 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65644684d5-hmnmb" event={"ID":"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7","Type":"ContainerStarted","Data":"35daf3af9d7c89d36eccf2fae8a3f27d111e0052995bd57d1892cef9be257f6a"} Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.195836 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="cinder-scheduler" containerID="cri-o://d6d56553bff5cb3c9407e9f4faa59907e6c863edd3b016168a4d3db8354cef48" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.195886 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="probe" containerID="cri-o://99543d6c3a77fd0ecc62fb13a4a683462c46fc9d0453d2bf1fddc07a1f282ef1" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.196147 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-log" containerID="cri-o://4b62b5404951a3b5a30ca64005c0d94a9af131982879e921a5c9fb63ef410775" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.196335 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-httpd" containerID="cri-o://4bf9ee208fa3b8513c65a05834413c0a2ac2c85d4feb9c124cc19f37eaf3b6f1" gracePeriod=30 Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.241155 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.249735 4788 scope.go:117] "RemoveContainer" containerID="3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.250157 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-8pgqz"] Dec 11 09:44:11 
crc kubenswrapper[4788]: I1211 09:44:11.289334 4788 scope.go:117] "RemoveContainer" containerID="532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490" Dec 11 09:44:11 crc kubenswrapper[4788]: E1211 09:44:11.289834 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490\": container with ID starting with 532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490 not found: ID does not exist" containerID="532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.289886 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490"} err="failed to get container status \"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490\": rpc error: code = NotFound desc = could not find container \"532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490\": container with ID starting with 532efeadc37f1c94fb9140b7f49b039f30f81f7364f5dfd0f8983d4678e16490 not found: ID does not exist" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.289921 4788 scope.go:117] "RemoveContainer" containerID="3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4" Dec 11 09:44:11 crc kubenswrapper[4788]: E1211 09:44:11.294543 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4\": container with ID starting with 3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4 not found: ID does not exist" containerID="3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4" Dec 11 09:44:11 crc kubenswrapper[4788]: I1211 09:44:11.294597 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4"} err="failed to get container status \"3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4\": rpc error: code = NotFound desc = could not find container \"3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4\": container with ID starting with 3802a96c4ecbded7d453d61b2f777eab3d883b9f7481459f921664b50af459d4 not found: ID does not exist" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.203871 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.245281 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerStarted","Data":"96e995953b29cfeb30a0dfecf22b26dfeb84d8c0bf2cd378eeba26e5bf71e2a5"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.245326 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerStarted","Data":"61982536d201c64530ed5c611d1bf705bb3a0b0a910f23d0e16bdfcd895f175c"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.246658 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.246684 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.292524 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-8495b6dc44-4jlzn" podStartSLOduration=3.292503596 podStartE2EDuration="3.292503596s" podCreationTimestamp="2025-12-11 09:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:12.274326294 +0000 UTC m=+1382.345105890" watchObservedRunningTime="2025-12-11 09:44:12.292503596 +0000 UTC m=+1382.363283182" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308210 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308343 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46dh2\" (UniqueName: \"kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308436 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308511 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308570 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308615 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.308644 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"89743535-ac96-4adc-ba59-a586e71dd880\" (UID: \"89743535-ac96-4adc-ba59-a586e71dd880\") " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311839 4788 generic.go:334] "Generic (PLEG): container finished" podID="89743535-ac96-4adc-ba59-a586e71dd880" containerID="560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" exitCode=0 Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311869 4788 generic.go:334] "Generic (PLEG): container finished" podID="89743535-ac96-4adc-ba59-a586e71dd880" containerID="9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" exitCode=143 Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311933 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerDied","Data":"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311963 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerDied","Data":"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311976 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"89743535-ac96-4adc-ba59-a586e71dd880","Type":"ContainerDied","Data":"7cf45c056939f4e0ebf779a5d75876da21574f0d5cf96068547be4ae8da3a8d3"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.311994 4788 scope.go:117] "RemoveContainer" containerID="560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.312124 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.313299 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs" (OuterVolumeSpecName: "logs") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.313573 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.333485 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts" (OuterVolumeSpecName: "scripts") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.345045 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2" (OuterVolumeSpecName: "kube-api-access-46dh2") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "kube-api-access-46dh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.345127 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.366503 4788 generic.go:334] "Generic (PLEG): container finished" podID="ad94c5a8-496d-420f-8c95-5412c16de875" containerID="4bf9ee208fa3b8513c65a05834413c0a2ac2c85d4feb9c124cc19f37eaf3b6f1" exitCode=0 Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.366541 4788 generic.go:334] "Generic (PLEG): container finished" podID="ad94c5a8-496d-420f-8c95-5412c16de875" containerID="4b62b5404951a3b5a30ca64005c0d94a9af131982879e921a5c9fb63ef410775" exitCode=143 Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.366604 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerDied","Data":"4bf9ee208fa3b8513c65a05834413c0a2ac2c85d4feb9c124cc19f37eaf3b6f1"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.366631 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerDied","Data":"4b62b5404951a3b5a30ca64005c0d94a9af131982879e921a5c9fb63ef410775"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.386165 4788 generic.go:334] "Generic (PLEG): container finished" podID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerID="4635c2d31878d3ffff94d142d5e4cfa69263e18cdd760b9af8a4abac6336eb26" exitCode=0 Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.386208 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" event={"ID":"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652","Type":"ContainerDied","Data":"4635c2d31878d3ffff94d142d5e4cfa69263e18cdd760b9af8a4abac6336eb26"} Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.386794 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data" (OuterVolumeSpecName: "config-data") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412325 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412353 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412407 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412417 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412426 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46dh2\" (UniqueName: \"kubernetes.io/projected/89743535-ac96-4adc-ba59-a586e71dd880-kube-api-access-46dh2\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.412436 4788 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/89743535-ac96-4adc-ba59-a586e71dd880-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.421483 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89743535-ac96-4adc-ba59-a586e71dd880" (UID: "89743535-ac96-4adc-ba59-a586e71dd880"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.435148 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.514660 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89743535-ac96-4adc-ba59-a586e71dd880-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.514687 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.527859 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" path="/var/lib/kubelet/pods/b65bcdfb-50fa-404f-a9cc-0511b1e9dce7/volumes" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.642269 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.663103 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.676994 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:12 crc kubenswrapper[4788]: E1211 09:44:12.679426 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="dnsmasq-dns" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679467 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="dnsmasq-dns" Dec 11 09:44:12 crc kubenswrapper[4788]: E1211 09:44:12.679495 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-httpd" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679508 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-httpd" Dec 11 09:44:12 crc kubenswrapper[4788]: E1211 09:44:12.679522 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-log" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679531 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-log" Dec 11 09:44:12 crc kubenswrapper[4788]: E1211 09:44:12.679568 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="init" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679576 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="init" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679926 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b65bcdfb-50fa-404f-a9cc-0511b1e9dce7" containerName="dnsmasq-dns" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679958 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-httpd" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.679975 4788 
memory_manager.go:354] "RemoveStaleState removing state" podUID="89743535-ac96-4adc-ba59-a586e71dd880" containerName="glance-log" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.681374 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.692341 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.692613 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.693453 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913561 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913617 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913666 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913704 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913723 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913792 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:12 crc kubenswrapper[4788]: I1211 09:44:12.913868 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p49k\" (UniqueName: \"kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015761 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015832 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015885 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p49k\" (UniqueName: \"kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015915 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015935 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.015977 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.016023 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.016058 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " 
pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.017093 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.018936 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.019199 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.026261 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.026798 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.027090 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.046110 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.057466 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p49k\" (UniqueName: \"kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.072541 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.185061 4788 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.410682 4788 generic.go:334] "Generic (PLEG): container finished" podID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerID="99543d6c3a77fd0ecc62fb13a4a683462c46fc9d0453d2bf1fddc07a1f282ef1" exitCode=0 Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.411798 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerDied","Data":"99543d6c3a77fd0ecc62fb13a4a683462c46fc9d0453d2bf1fddc07a1f282ef1"} Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.455298 4788 scope.go:117] "RemoveContainer" containerID="9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.568697 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.631926 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632033 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqv4l\" (UniqueName: \"kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632180 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632318 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632349 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632375 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ad94c5a8-496d-420f-8c95-5412c16de875\" (UID: \"ad94c5a8-496d-420f-8c95-5412c16de875\") " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.632812 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.633072 4788 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.633191 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs" (OuterVolumeSpecName: "logs") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.647432 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts" (OuterVolumeSpecName: "scripts") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.647462 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.647439 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l" (OuterVolumeSpecName: "kube-api-access-vqv4l") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "kube-api-access-vqv4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.683598 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.734574 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.734807 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad94c5a8-496d-420f-8c95-5412c16de875-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.734843 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.734852 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.734863 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqv4l\" (UniqueName: \"kubernetes.io/projected/ad94c5a8-496d-420f-8c95-5412c16de875-kube-api-access-vqv4l\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.757779 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.765094 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data" (OuterVolumeSpecName: "config-data") pod "ad94c5a8-496d-420f-8c95-5412c16de875" (UID: "ad94c5a8-496d-420f-8c95-5412c16de875"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.836381 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad94c5a8-496d-420f-8c95-5412c16de875-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.836425 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:13 crc kubenswrapper[4788]: I1211 09:44:13.880856 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.112614 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-65f8fcd946-pfr68"] Dec 11 09:44:14 crc kubenswrapper[4788]: E1211 09:44:14.119276 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-httpd" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.119610 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-httpd" Dec 11 09:44:14 crc kubenswrapper[4788]: E1211 09:44:14.119997 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-log" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.120075 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-log" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.120497 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-httpd" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.120619 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" containerName="glance-log" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.128174 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.134637 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.134941 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.155346 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65f8fcd946-pfr68"] Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254025 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254102 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq7l2\" (UniqueName: \"kubernetes.io/projected/f4638f63-07df-47e5-942d-3061f2162f08-kube-api-access-sq7l2\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254131 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data-custom\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254152 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4638f63-07df-47e5-942d-3061f2162f08-logs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254210 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-internal-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254377 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-public-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.254513 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-combined-ca-bundle\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.318388 4788 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/keystone-c99b79967-dmp47" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.356872 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-internal-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.356932 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-public-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357055 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-combined-ca-bundle\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357119 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357152 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq7l2\" (UniqueName: \"kubernetes.io/projected/f4638f63-07df-47e5-942d-3061f2162f08-kube-api-access-sq7l2\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357178 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data-custom\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357199 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4638f63-07df-47e5-942d-3061f2162f08-logs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.357868 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4638f63-07df-47e5-942d-3061f2162f08-logs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.363425 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-internal-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " 
pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.364951 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-public-tls-certs\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.365873 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.367567 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-combined-ca-bundle\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.377387 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq7l2\" (UniqueName: \"kubernetes.io/projected/f4638f63-07df-47e5-942d-3061f2162f08-kube-api-access-sq7l2\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.381115 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f4638f63-07df-47e5-942d-3061f2162f08-config-data-custom\") pod \"barbican-api-65f8fcd946-pfr68\" (UID: \"f4638f63-07df-47e5-942d-3061f2162f08\") " pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.433574 4788 generic.go:334] "Generic (PLEG): container finished" podID="32f6b143-7465-4311-85d7-d7668fde477c" containerID="86b8455f9f4b87b9234f4d8fe45e0647e65a9a6431e22dc6688e5d81c3af5966" exitCode=0 Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.433662 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwvb7" event={"ID":"32f6b143-7465-4311-85d7-d7668fde477c","Type":"ContainerDied","Data":"86b8455f9f4b87b9234f4d8fe45e0647e65a9a6431e22dc6688e5d81c3af5966"} Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.439645 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ad94c5a8-496d-420f-8c95-5412c16de875","Type":"ContainerDied","Data":"9352bab09b1685321ceb939202af7efee1047205485b3f998c4337074f9a1ddf"} Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.439780 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.481314 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.527581 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89743535-ac96-4adc-ba59-a586e71dd880" path="/var/lib/kubelet/pods/89743535-ac96-4adc-ba59-a586e71dd880/volumes" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.532799 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.540051 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.554782 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.556927 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.563203 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.563595 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.576573 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.636038 4788 scope.go:117] "RemoveContainer" containerID="560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" Dec 11 09:44:14 crc kubenswrapper[4788]: E1211 09:44:14.637547 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e\": container with ID starting with 560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e not found: ID does not exist" containerID="560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.637590 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e"} err="failed to get container status \"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e\": rpc error: code = NotFound desc = could not find container \"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e\": container with ID starting with 560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e not found: ID does not exist" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.637636 4788 scope.go:117] "RemoveContainer" containerID="9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" Dec 11 09:44:14 crc kubenswrapper[4788]: E1211 09:44:14.638146 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989\": container with ID starting with 9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989 not found: ID does not exist" containerID="9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638195 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989"} err="failed to get container status \"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989\": rpc error: code = NotFound desc = could not find container \"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989\": container with ID starting with 9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989 not found: ID does not exist" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638213 4788 scope.go:117] "RemoveContainer" containerID="560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638504 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e"} err="failed to get container status \"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e\": rpc error: code = NotFound desc = could not find container \"560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e\": container with ID starting with 560816e57f301fce01f1597f6abcaade1fa3af9721b415f2ed00342e9d67000e not found: ID does not exist" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638545 4788 scope.go:117] "RemoveContainer" containerID="9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638745 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989"} err="failed to get container status \"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989\": rpc error: code = NotFound desc = could not find container \"9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989\": container with ID starting with 9fbeb186b04caa3da029b3b5fa9a996a845e28744ed4298bf6425a28e5756989 not found: ID does not exist" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.638826 4788 scope.go:117] "RemoveContainer" containerID="4bf9ee208fa3b8513c65a05834413c0a2ac2c85d4feb9c124cc19f37eaf3b6f1" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.663539 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664416 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664480 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664538 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664567 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4v79\" (UniqueName: \"kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664593 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664727 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.664764 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766390 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766463 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766493 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766527 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766549 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4v79\" 
(UniqueName: \"kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766573 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766651 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.766679 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.767188 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.768461 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.772744 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.774934 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.777297 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.789342 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data\") pod \"glance-default-internal-api-0\" 
(UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.790546 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.800083 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4v79\" (UniqueName: \"kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.804228 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.878493 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:14 crc kubenswrapper[4788]: I1211 09:44:14.929602 4788 scope.go:117] "RemoveContainer" containerID="4b62b5404951a3b5a30ca64005c0d94a9af131982879e921a5c9fb63ef410775" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.438597 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.498295 4788 generic.go:334] "Generic (PLEG): container finished" podID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerID="d6d56553bff5cb3c9407e9f4faa59907e6c863edd3b016168a4d3db8354cef48" exitCode=0 Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.498370 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerDied","Data":"d6d56553bff5cb3c9407e9f4faa59907e6c863edd3b016168a4d3db8354cef48"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.498401 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a74b5ace-181e-480f-a62a-c5877ffa9e62","Type":"ContainerDied","Data":"d008348452ead246c7a503e879976395513928030c5572facb6ddf25d4472453"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.498414 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d008348452ead246c7a503e879976395513928030c5572facb6ddf25d4472453" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.510892 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.517409 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerStarted","Data":"8fe1d4c9e9ad7e7842296d89177f1422d780ec5e9ba7ca3fe0c4be0a81352569"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.544017 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" event={"ID":"5e2739c9-c97e-4807-bde7-172073652810","Type":"ContainerStarted","Data":"026068ed32e0e0041f5bfba6466c230c5f047584f1f1025b011b8d56cdb515d3"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.558033 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65644684d5-hmnmb" event={"ID":"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7","Type":"ContainerStarted","Data":"19ad32e5b64754f8e7f6c58221d509e6dd7b8059cb1e1ec88e10129173274d67"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.572277 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" event={"ID":"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652","Type":"ContainerStarted","Data":"b30e19848fed5fa5fb0b9043f88323e6c228936c8a126be38a058c2128ca8891"} Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.572395 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.596554 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id\") pod \"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.596742 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lbzl\" (UniqueName: \"kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl\") pod \"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.596917 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle\") pod \"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.597247 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts\") pod \"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.597317 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom\") pod \"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.597349 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data\") pod 
\"a74b5ace-181e-480f-a62a-c5877ffa9e62\" (UID: \"a74b5ace-181e-480f-a62a-c5877ffa9e62\") " Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.598787 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.602442 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65f8fcd946-pfr68"] Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.610796 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" podStartSLOduration=6.610775205 podStartE2EDuration="6.610775205s" podCreationTimestamp="2025-12-11 09:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:15.598700954 +0000 UTC m=+1385.669480560" watchObservedRunningTime="2025-12-11 09:44:15.610775205 +0000 UTC m=+1385.681554791" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.612815 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts" (OuterVolumeSpecName: "scripts") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.613241 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl" (OuterVolumeSpecName: "kube-api-access-8lbzl") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "kube-api-access-8lbzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.613682 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.676114 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.701939 4788 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a74b5ace-181e-480f-a62a-c5877ffa9e62-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.701965 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lbzl\" (UniqueName: \"kubernetes.io/projected/a74b5ace-181e-480f-a62a-c5877ffa9e62-kube-api-access-8lbzl\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.701975 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.701985 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:15 crc kubenswrapper[4788]: I1211 09:44:15.701993 4788 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.013713 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data" (OuterVolumeSpecName: "config-data") pod "a74b5ace-181e-480f-a62a-c5877ffa9e62" (UID: "a74b5ace-181e-480f-a62a-c5877ffa9e62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.018495 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a74b5ace-181e-480f-a62a-c5877ffa9e62-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.099520 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.282655 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.426342 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk6sz\" (UniqueName: \"kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz\") pod \"32f6b143-7465-4311-85d7-d7668fde477c\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.426543 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config\") pod \"32f6b143-7465-4311-85d7-d7668fde477c\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.426706 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle\") pod \"32f6b143-7465-4311-85d7-d7668fde477c\" (UID: \"32f6b143-7465-4311-85d7-d7668fde477c\") " Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.441922 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz" (OuterVolumeSpecName: "kube-api-access-fk6sz") pod "32f6b143-7465-4311-85d7-d7668fde477c" (UID: "32f6b143-7465-4311-85d7-d7668fde477c"). InnerVolumeSpecName "kube-api-access-fk6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.490357 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config" (OuterVolumeSpecName: "config") pod "32f6b143-7465-4311-85d7-d7668fde477c" (UID: "32f6b143-7465-4311-85d7-d7668fde477c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.512385 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32f6b143-7465-4311-85d7-d7668fde477c" (UID: "32f6b143-7465-4311-85d7-d7668fde477c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.515802 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad94c5a8-496d-420f-8c95-5412c16de875" path="/var/lib/kubelet/pods/ad94c5a8-496d-420f-8c95-5412c16de875/volumes" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.530742 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.530798 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk6sz\" (UniqueName: \"kubernetes.io/projected/32f6b143-7465-4311-85d7-d7668fde477c-kube-api-access-fk6sz\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.530815 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/32f6b143-7465-4311-85d7-d7668fde477c-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.666861 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" event={"ID":"5e2739c9-c97e-4807-bde7-172073652810","Type":"ContainerStarted","Data":"4d805dd5ed9741d00052ee1b530d7a9ab6cc28874203ba718969eb6db4e20449"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.689838 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-65644684d5-hmnmb" event={"ID":"3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7","Type":"ContainerStarted","Data":"5668eef3b9c0bb30138baa267d09f716d41a083af7ba69ed045aad3986321d9a"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.716798 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwvb7" event={"ID":"32f6b143-7465-4311-85d7-d7668fde477c","Type":"ContainerDied","Data":"60c388bf45ebfc8ba0e5ce5859275377d54bc1843a03d658ea7b3f862ced7790"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.716852 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60c388bf45ebfc8ba0e5ce5859275377d54bc1843a03d658ea7b3f862ced7790" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.716928 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bwvb7" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.730528 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerStarted","Data":"61d85d0ef657bfea851c9078acff9faf980f82fc705418b639f40a85a78136b9"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.732215 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6fd98b95d6-wxbsw" podStartSLOduration=3.301906936 podStartE2EDuration="7.732191774s" podCreationTimestamp="2025-12-11 09:44:09 +0000 UTC" firstStartedPulling="2025-12-11 09:44:10.358836406 +0000 UTC m=+1380.429615992" lastFinishedPulling="2025-12-11 09:44:14.789121244 +0000 UTC m=+1384.859900830" observedRunningTime="2025-12-11 09:44:16.704778087 +0000 UTC m=+1386.775557673" watchObservedRunningTime="2025-12-11 09:44:16.732191774 +0000 UTC m=+1386.802971360" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.773325 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65f8fcd946-pfr68" event={"ID":"f4638f63-07df-47e5-942d-3061f2162f08","Type":"ContainerStarted","Data":"c435e4aeafa10362eeda77362a9365be5796f871e985246531bacafde8adb7b6"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.773380 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65f8fcd946-pfr68" event={"ID":"f4638f63-07df-47e5-942d-3061f2162f08","Type":"ContainerStarted","Data":"7ee717b92d9f21c095baa4ede75326b74e209e5e41d8dc43b566793eb857863c"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.773395 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65f8fcd946-pfr68" event={"ID":"f4638f63-07df-47e5-942d-3061f2162f08","Type":"ContainerStarted","Data":"1712f13b22fc0af6f1cc93ee9e39b07b389e52e19b56666586a4552ea60f008f"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.773439 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.773500 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.775626 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-65644684d5-hmnmb" podStartSLOduration=3.283627771 podStartE2EDuration="7.775600703s" podCreationTimestamp="2025-12-11 09:44:09 +0000 UTC" firstStartedPulling="2025-12-11 09:44:10.188179039 +0000 UTC m=+1380.258958625" lastFinishedPulling="2025-12-11 09:44:14.680151971 +0000 UTC m=+1384.750931557" observedRunningTime="2025-12-11 09:44:16.740005726 +0000 UTC m=+1386.810785312" watchObservedRunningTime="2025-12-11 09:44:16.775600703 +0000 UTC m=+1386.846380299" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.813029 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.816738 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.816774 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerStarted","Data":"9bb68d2a7ef6e2ba858a24a9a2e19ba74c9aa8bc07842ae64aee1fd42f531c21"} Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.831423 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:44:16 crc kubenswrapper[4788]: E1211 09:44:16.831779 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="probe" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.831796 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="probe" Dec 11 09:44:16 crc kubenswrapper[4788]: E1211 09:44:16.831813 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="cinder-scheduler" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.831820 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="cinder-scheduler" Dec 11 09:44:16 crc kubenswrapper[4788]: E1211 09:44:16.831853 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32f6b143-7465-4311-85d7-d7668fde477c" containerName="neutron-db-sync" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.831858 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="32f6b143-7465-4311-85d7-d7668fde477c" containerName="neutron-db-sync" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.832079 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="probe" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.832099 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="32f6b143-7465-4311-85d7-d7668fde477c" containerName="neutron-db-sync" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.832114 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" containerName="cinder-scheduler" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.833079 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.840215 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-65f8fcd946-pfr68" podStartSLOduration=2.840193185 podStartE2EDuration="2.840193185s" podCreationTimestamp="2025-12-11 09:44:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:16.803569854 +0000 UTC m=+1386.874349440" watchObservedRunningTime="2025-12-11 09:44:16.840193185 +0000 UTC m=+1386.910972771" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.869474 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.927368 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.951879 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.952032 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.952064 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.952119 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.952165 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.952268 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsv7c\" (UniqueName: \"kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.980546 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] 
Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.989581 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.992708 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:44:16 crc kubenswrapper[4788]: I1211 09:44:16.996722 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.008851 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055437 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055506 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055525 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055544 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055578 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055599 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055618 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055654 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mst6q\" 
(UniqueName: \"kubernetes.io/projected/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-kube-api-access-mst6q\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055677 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsv7c\" (UniqueName: \"kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055698 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055727 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.055749 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.056717 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.057030 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.057559 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.057726 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.058026 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" 
(UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.070788 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.072790 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.085119 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.085344 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-6xftg" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.085620 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsv7c\" (UniqueName: \"kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c\") pod \"dnsmasq-dns-6578955fd5-dwrpb\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.085849 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.085965 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.099418 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158601 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158649 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158676 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158700 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cpt6\" (UniqueName: \"kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158724 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle\") pod \"neutron-dbdd8b984-2qbs8\" (UID: 
\"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158747 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mst6q\" (UniqueName: \"kubernetes.io/projected/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-kube-api-access-mst6q\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158778 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.158952 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.159166 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.159247 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.159293 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.159579 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.167091 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.169132 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.169786 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.178075 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.189918 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mst6q\" (UniqueName: \"kubernetes.io/projected/b29f4b83-e1ef-49cf-82eb-e7f080c7b28b-kube-api-access-mst6q\") pod \"cinder-scheduler-0\" (UID: \"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b\") " pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.263475 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.263847 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.263964 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.264092 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cpt6\" (UniqueName: \"kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.264198 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.272630 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.273880 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" 
Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.274129 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.285321 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.289528 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.298264 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cpt6\" (UniqueName: \"kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6\") pod \"neutron-dbdd8b984-2qbs8\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.349719 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.460765 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.838014 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerStarted","Data":"e807a7af6c8f63517b2560bd8a264b070d35c7806daed7f9013aa41ab45b18c2"} Dec 11 09:44:17 crc kubenswrapper[4788]: I1211 09:44:17.838582 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="dnsmasq-dns" containerID="cri-o://b30e19848fed5fa5fb0b9043f88323e6c228936c8a126be38a058c2128ca8891" gracePeriod=10 Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.137833 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.365993 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 11 09:44:18 crc kubenswrapper[4788]: W1211 09:44:18.409532 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb29f4b83_e1ef_49cf_82eb_e7f080c7b28b.slice/crio-e049ca25ddfdaf316eaba0b32e628ed804bc7ce64cfbf7542388003a6a003a82 WatchSource:0}: Error finding container e049ca25ddfdaf316eaba0b32e628ed804bc7ce64cfbf7542388003a6a003a82: Status 404 returned error can't find the container with id e049ca25ddfdaf316eaba0b32e628ed804bc7ce64cfbf7542388003a6a003a82 Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.512524 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a74b5ace-181e-480f-a62a-c5877ffa9e62" path="/var/lib/kubelet/pods/a74b5ace-181e-480f-a62a-c5877ffa9e62/volumes" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.578292 4788 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/openstackclient"] Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.579636 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.582810 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.585517 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-rkf7t" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.586067 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.625018 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.625124 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.625208 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bxh5\" (UniqueName: \"kubernetes.io/projected/4bae27c1-f73d-4bdb-91a2-185dd601bc33-kube-api-access-5bxh5\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.625407 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config-secret\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.627299 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.668987 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.727412 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.727474 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bxh5\" (UniqueName: \"kubernetes.io/projected/4bae27c1-f73d-4bdb-91a2-185dd601bc33-kube-api-access-5bxh5\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.727549 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config-secret\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.727656 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.737418 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.743021 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.747592 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4bae27c1-f73d-4bdb-91a2-185dd601bc33-openstack-config-secret\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.753557 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bxh5\" (UniqueName: \"kubernetes.io/projected/4bae27c1-f73d-4bdb-91a2-185dd601bc33-kube-api-access-5bxh5\") pod \"openstackclient\" (UID: \"4bae27c1-f73d-4bdb-91a2-185dd601bc33\") " pod="openstack/openstackclient" Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.892708 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b","Type":"ContainerStarted","Data":"e049ca25ddfdaf316eaba0b32e628ed804bc7ce64cfbf7542388003a6a003a82"} Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.894511 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerStarted","Data":"e4a3366f02969ec0cdafc0d1d9e1b36b50072a79edd80089aab615344932743d"} Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.913803 4788 generic.go:334] "Generic (PLEG): container finished" podID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerID="b30e19848fed5fa5fb0b9043f88323e6c228936c8a126be38a058c2128ca8891" exitCode=0 Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.913895 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" event={"ID":"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652","Type":"ContainerDied","Data":"b30e19848fed5fa5fb0b9043f88323e6c228936c8a126be38a058c2128ca8891"} Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.958515 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerStarted","Data":"039dd34b99f4c9129348ce12f05acbc36d6c337d228d567f2c975150e7ca3a60"} Dec 11 09:44:18 crc 
kubenswrapper[4788]: I1211 09:44:18.978974 4788 generic.go:334] "Generic (PLEG): container finished" podID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerID="654a738631f1e4467114a63eb810993afd615b4365eedef1eb14daaafe4b4de0" exitCode=0 Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.979027 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" event={"ID":"1e46d499-ff77-4620-8c50-dcee4ac3af39","Type":"ContainerDied","Data":"654a738631f1e4467114a63eb810993afd615b4365eedef1eb14daaafe4b4de0"} Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.979056 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" event={"ID":"1e46d499-ff77-4620-8c50-dcee4ac3af39","Type":"ContainerStarted","Data":"494388ea419506afceb3e270afb35680db8a365419d952455401409d2c9a3a8b"} Dec 11 09:44:18 crc kubenswrapper[4788]: I1211 09:44:18.981385 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.013740 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.013721562 podStartE2EDuration="7.013721562s" podCreationTimestamp="2025-12-11 09:44:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:18.996621904 +0000 UTC m=+1389.067401500" watchObservedRunningTime="2025-12-11 09:44:19.013721562 +0000 UTC m=+1389.084501158" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.332141 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-84d5c869dd-hzg6f" podUID="3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.356666 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473214 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473257 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473297 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxttz\" (UniqueName: \"kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473331 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.473360 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb\") pod \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\" (UID: \"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652\") " Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.526464 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz" (OuterVolumeSpecName: "kube-api-access-qxttz") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "kube-api-access-qxttz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.576982 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxttz\" (UniqueName: \"kubernetes.io/projected/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-kube-api-access-qxttz\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.593730 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.594361 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.618115 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config" (OuterVolumeSpecName: "config") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.654218 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.678852 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.678966 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.678987 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.679005 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.689506 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.758750 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" (UID: "5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:19 crc kubenswrapper[4788]: I1211 09:44:19.782407 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.017468 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerStarted","Data":"357596e98a25df108a6cc28d0fe67fe0795d4a64c5ba0956b0bdeec512849786"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.020271 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerStarted","Data":"dc13560bdae7328c85ce0f94de2a07998fdc8c63238857fb80c6ba5f8b925e80"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.028692 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.028725 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b" event={"ID":"5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652","Type":"ContainerDied","Data":"c2d2011886acacf667886365914d097fd3dd1f13f5c8d7d84e70203dcb51ec3f"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.028786 4788 scope.go:117] "RemoveContainer" containerID="b30e19848fed5fa5fb0b9043f88323e6c228936c8a126be38a058c2128ca8891" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.074217 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4bae27c1-f73d-4bdb-91a2-185dd601bc33","Type":"ContainerStarted","Data":"c134f3b0854373cfd598c18a3f085a8eda79b5b18f9d85ef3002b38750fe14c8"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.100412 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" event={"ID":"1e46d499-ff77-4620-8c50-dcee4ac3af39","Type":"ContainerStarted","Data":"217b9afd30618cc7171651fa083b826f22e3984e8c4ceace67b6cc157748535e"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.102783 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.105681 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.105666466 podStartE2EDuration="6.105666466s" podCreationTimestamp="2025-12-11 09:44:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:20.049874759 +0000 UTC m=+1390.120654345" watchObservedRunningTime="2025-12-11 09:44:20.105666466 +0000 UTC m=+1390.176446052" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.141602 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b","Type":"ContainerStarted","Data":"c98b868076a91d40ece7878eb6deb58ccb62dc3a30b4784177e0518a32176b24"} Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.146209 4788 scope.go:117] "RemoveContainer" containerID="4635c2d31878d3ffff94d142d5e4cfa69263e18cdd760b9af8a4abac6336eb26" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.251087 4788 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" podStartSLOduration=4.251004625 podStartE2EDuration="4.251004625s" podCreationTimestamp="2025-12-11 09:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:20.164717769 +0000 UTC m=+1390.235497355" watchObservedRunningTime="2025-12-11 09:44:20.251004625 +0000 UTC m=+1390.321784211" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.287298 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.304455 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cc8b5d5c5-fzl5b"] Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.528453 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" path="/var/lib/kubelet/pods/5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652/volumes" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.988872 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-564d966fb9-4l95x"] Dec 11 09:44:20 crc kubenswrapper[4788]: E1211 09:44:20.989613 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="init" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.989629 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="init" Dec 11 09:44:20 crc kubenswrapper[4788]: E1211 09:44:20.989645 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="dnsmasq-dns" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.989651 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="dnsmasq-dns" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.989855 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d8bf6ec-ea58-45f3-942e-ae0c6e6d1652" containerName="dnsmasq-dns" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.991824 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.994806 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 11 09:44:20 crc kubenswrapper[4788]: I1211 09:44:20.994806 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.012004 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-564d966fb9-4l95x"] Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.125987 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126036 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-httpd-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126168 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-combined-ca-bundle\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126199 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-ovndb-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126281 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcnd2\" (UniqueName: \"kubernetes.io/projected/ac53e60f-bd33-417c-b606-cbe350b6597a-kube-api-access-wcnd2\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126417 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-internal-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.126563 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-public-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.155818 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" 
event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerStarted","Data":"40867a8e053a95a8dacb5f0f1681376d343de24bd312723b1e8327515bfe6b4b"} Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.156257 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.189332 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dbdd8b984-2qbs8" podStartSLOduration=4.189306857 podStartE2EDuration="4.189306857s" podCreationTimestamp="2025-12-11 09:44:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:21.184057285 +0000 UTC m=+1391.254836871" watchObservedRunningTime="2025-12-11 09:44:21.189306857 +0000 UTC m=+1391.260086463" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229115 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-combined-ca-bundle\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229170 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-ovndb-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229240 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcnd2\" (UniqueName: \"kubernetes.io/projected/ac53e60f-bd33-417c-b606-cbe350b6597a-kube-api-access-wcnd2\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229287 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-internal-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229342 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-public-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229445 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.229464 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-httpd-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" 
Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.241789 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.256082 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-ovndb-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.259985 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-combined-ca-bundle\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.260892 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-httpd-config\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.265349 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-internal-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.270243 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac53e60f-bd33-417c-b606-cbe350b6597a-public-tls-certs\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.273710 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcnd2\" (UniqueName: \"kubernetes.io/projected/ac53e60f-bd33-417c-b606-cbe350b6597a-kube-api-access-wcnd2\") pod \"neutron-564d966fb9-4l95x\" (UID: \"ac53e60f-bd33-417c-b606-cbe350b6597a\") " pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.335636 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.377092 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.377156 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.377653 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.378414 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:44:21 crc kubenswrapper[4788]: I1211 09:44:21.378474 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9" gracePeriod=600 Dec 11 09:44:22 crc kubenswrapper[4788]: I1211 09:44:22.013690 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-564d966fb9-4l95x"] Dec 11 09:44:22 crc kubenswrapper[4788]: W1211 09:44:22.022649 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac53e60f_bd33_417c_b606_cbe350b6597a.slice/crio-487e4a8ce937acb4cd91c863449e15d5bf4f020a01e4f511a7b66f2a77d542f4 WatchSource:0}: Error finding container 487e4a8ce937acb4cd91c863449e15d5bf4f020a01e4f511a7b66f2a77d542f4: Status 404 returned error can't find the container with id 487e4a8ce937acb4cd91c863449e15d5bf4f020a01e4f511a7b66f2a77d542f4 Dec 11 09:44:22 crc kubenswrapper[4788]: I1211 09:44:22.176348 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-564d966fb9-4l95x" event={"ID":"ac53e60f-bd33-417c-b606-cbe350b6597a","Type":"ContainerStarted","Data":"487e4a8ce937acb4cd91c863449e15d5bf4f020a01e4f511a7b66f2a77d542f4"} Dec 11 09:44:22 crc kubenswrapper[4788]: I1211 09:44:22.997137 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.185825 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.185880 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.357948 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-external-api-0" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.360959 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.529524 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.542294 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.601856 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.608844 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 09:44:23 crc kubenswrapper[4788]: I1211 09:44:23.694120 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.199934 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9" exitCode=0 Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.200011 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9"} Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.200351 4788 scope.go:117] "RemoveContainer" containerID="45b7a6743f34908def35f21206c5184bf57302d15d03214ff0c0f9850ad5924d" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.205577 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b29f4b83-e1ef-49cf-82eb-e7f080c7b28b","Type":"ContainerStarted","Data":"5c4962e520127526fd09e9a43d109cdf6caa249dc2fecf6ca97b16d293c52c59"} Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.212376 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-564d966fb9-4l95x" event={"ID":"ac53e60f-bd33-417c-b606-cbe350b6597a","Type":"ContainerStarted","Data":"38f16815812f4e6ef6cec64a6508be1395872be7a2df3c3a1752b8917b97b28e"} Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.212676 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.212864 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.227429 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.227410602 podStartE2EDuration="8.227410602s" podCreationTimestamp="2025-12-11 09:44:16 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:24.225871107 +0000 UTC m=+1394.296650693" watchObservedRunningTime="2025-12-11 09:44:24.227410602 +0000 UTC m=+1394.298190178" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.879810 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.880199 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.930323 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:24 crc kubenswrapper[4788]: I1211 09:44:24.939389 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.281427 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1"} Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.294738 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-564d966fb9-4l95x" event={"ID":"ac53e60f-bd33-417c-b606-cbe350b6597a","Type":"ContainerStarted","Data":"17d26e07c20bbb948d35d0e4b9a4aa850bc7d9ee865a4ee7fa7ad246bd2b8967"} Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.294788 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.303721 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.303786 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:25 crc kubenswrapper[4788]: I1211 09:44:25.350782 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-564d966fb9-4l95x" podStartSLOduration=5.350755796 podStartE2EDuration="5.350755796s" podCreationTimestamp="2025-12-11 09:44:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:25.333805282 +0000 UTC m=+1395.404584868" watchObservedRunningTime="2025-12-11 09:44:25.350755796 +0000 UTC m=+1395.421535402" Dec 11 09:44:26 crc kubenswrapper[4788]: I1211 09:44:26.363442 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:26 crc kubenswrapper[4788]: I1211 09:44:26.682679 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.292476 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.316130 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.316179 4788 prober_manager.go:312] "Failed to trigger a 
manual run" probe="Readiness" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.351032 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.384489 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.384786 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="dnsmasq-dns" containerID="cri-o://f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4" gracePeriod=10 Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.784995 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.798160 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65f8fcd946-pfr68" Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.899986 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.900204 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" containerID="cri-o://61982536d201c64530ed5c611d1bf705bb3a0b0a910f23d0e16bdfcd895f175c" gracePeriod=30 Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.900798 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" containerID="cri-o://96e995953b29cfeb30a0dfecf22b26dfeb84d8c0bf2cd378eeba26e5bf71e2a5" gracePeriod=30 Dec 11 09:44:27 crc kubenswrapper[4788]: I1211 09:44:27.919948 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": EOF" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.002575 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.184486 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.264734 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.264837 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.264902 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.264971 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.265047 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5mpq\" (UniqueName: \"kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.276523 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq" (OuterVolumeSpecName: "kube-api-access-s5mpq") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "kube-api-access-s5mpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.289303 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-d6d65bd59-g294c"] Dec 11 09:44:28 crc kubenswrapper[4788]: E1211 09:44:28.289735 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="init" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.289752 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="init" Dec 11 09:44:28 crc kubenswrapper[4788]: E1211 09:44:28.289795 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="dnsmasq-dns" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.289803 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="dnsmasq-dns" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.289974 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerName="dnsmasq-dns" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.303489 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.326304 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.326496 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.326607 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.342015 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-d6d65bd59-g294c"] Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.368102 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369725 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-229fn\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-kube-api-access-229fn\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369792 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-config-data\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369823 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-log-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369855 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-combined-ca-bundle\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369907 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-etc-swift\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369945 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-run-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " 
pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.369988 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-public-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.370038 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-internal-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.370129 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5mpq\" (UniqueName: \"kubernetes.io/projected/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-kube-api-access-s5mpq\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.370152 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.399523 4788 generic.go:334] "Generic (PLEG): container finished" podID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerID="61982536d201c64530ed5c611d1bf705bb3a0b0a910f23d0e16bdfcd895f175c" exitCode=143 Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.399643 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerDied","Data":"61982536d201c64530ed5c611d1bf705bb3a0b0a910f23d0e16bdfcd895f175c"} Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.399854 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.414975 4788 generic.go:334] "Generic (PLEG): container finished" podID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" containerID="f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4" exitCode=0 Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.415461 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" event={"ID":"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8","Type":"ContainerDied","Data":"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4"} Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.415477 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.415514 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-mtsfp" event={"ID":"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8","Type":"ContainerDied","Data":"d3b4b33f663678159298253408f03acfbe869edc99e05aef1fd28c7e9d49ccfd"} Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.415538 4788 scope.go:117] "RemoveContainer" containerID="f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.422341 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config" (OuterVolumeSpecName: "config") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.474250 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.475851 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") pod \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\" (UID: \"3e0171c8-cfda-4c64-9a70-5d1e596c8cd8\") " Dec 11 09:44:28 crc kubenswrapper[4788]: W1211 09:44:28.476113 4788 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8/volumes/kubernetes.io~configmap/ovsdbserver-nb Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.476157 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" (UID: "3e0171c8-cfda-4c64-9a70-5d1e596c8cd8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.476640 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-etc-swift\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.476857 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-run-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477017 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-public-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477206 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-internal-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477489 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-229fn\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-kube-api-access-229fn\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477659 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-config-data\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477754 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-log-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.477870 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-combined-ca-bundle\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.478105 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.478191 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.478281 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.484972 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-log-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.485221 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-run-httpd\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.494133 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-public-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.518121 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-internal-tls-certs\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.519097 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-combined-ca-bundle\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.519752 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-etc-swift\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.520211 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-config-data\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.520417 4788 scope.go:117] "RemoveContainer" containerID="45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.529143 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-229fn\" (UniqueName: \"kubernetes.io/projected/c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5-kube-api-access-229fn\") pod \"swift-proxy-d6d65bd59-g294c\" (UID: \"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5\") " 
pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.662593 4788 scope.go:117] "RemoveContainer" containerID="f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4" Dec 11 09:44:28 crc kubenswrapper[4788]: E1211 09:44:28.663109 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4\": container with ID starting with f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4 not found: ID does not exist" containerID="f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.663184 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4"} err="failed to get container status \"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4\": rpc error: code = NotFound desc = could not find container \"f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4\": container with ID starting with f0eab22bc4ebefcdd1e8d7c584169cabfcfa680e7b19fc03b1962ed3c44fddb4 not found: ID does not exist" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.663274 4788 scope.go:117] "RemoveContainer" containerID="45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c" Dec 11 09:44:28 crc kubenswrapper[4788]: E1211 09:44:28.664471 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c\": container with ID starting with 45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c not found: ID does not exist" containerID="45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.664507 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c"} err="failed to get container status \"45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c\": rpc error: code = NotFound desc = could not find container \"45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c\": container with ID starting with 45b28d4c23b4185f8a0f10b0cd669454912ed29a18d463ebc51547595454541c not found: ID does not exist" Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.745355 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.755700 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-mtsfp"] Dec 11 09:44:28 crc kubenswrapper[4788]: I1211 09:44:28.757923 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:29 crc kubenswrapper[4788]: I1211 09:44:29.005042 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 11 09:44:29 crc kubenswrapper[4788]: I1211 09:44:29.005569 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:44:29 crc kubenswrapper[4788]: I1211 09:44:29.007056 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:29.756806 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:29.757277 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:29.758493 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:30.150323 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-d6d65bd59-g294c"] Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:30.485017 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d6d65bd59-g294c" event={"ID":"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5","Type":"ContainerStarted","Data":"6f86500b522ca93886ded15acba9ac68341adcb8db8176c2be636090168e1d7a"} Dec 11 09:44:30 crc kubenswrapper[4788]: I1211 09:44:30.518894 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e0171c8-cfda-4c64-9a70-5d1e596c8cd8" path="/var/lib/kubelet/pods/3e0171c8-cfda-4c64-9a70-5d1e596c8cd8/volumes" Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.188346 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:54604->10.217.0.158:9311: read: connection reset by peer" Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.188386 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": read tcp 10.217.0.2:54614->10.217.0.158:9311: read: connection reset by peer" Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.505533 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d6d65bd59-g294c" event={"ID":"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5","Type":"ContainerStarted","Data":"0c0c3efd8c7eca5b260e94fad5fa48af6a04811a22451d927add814832ecdfa1"} Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.505600 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-d6d65bd59-g294c" event={"ID":"c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5","Type":"ContainerStarted","Data":"192c598571b5ed9d28f2de26685a99a7726fe11334196bc4112fba1c795a1764"} Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.506738 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.506774 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.517775 4788 generic.go:334] "Generic (PLEG): container finished" podID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerID="96e995953b29cfeb30a0dfecf22b26dfeb84d8c0bf2cd378eeba26e5bf71e2a5" exitCode=0 Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.517817 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerDied","Data":"96e995953b29cfeb30a0dfecf22b26dfeb84d8c0bf2cd378eeba26e5bf71e2a5"} Dec 11 09:44:31 crc kubenswrapper[4788]: I1211 09:44:31.546827 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-d6d65bd59-g294c" podStartSLOduration=3.546806223 podStartE2EDuration="3.546806223s" podCreationTimestamp="2025-12-11 09:44:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:31.528556299 +0000 UTC m=+1401.599335885" watchObservedRunningTime="2025-12-11 09:44:31.546806223 +0000 UTC m=+1401.617585809" Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.543541 4788 generic.go:334] "Generic (PLEG): container finished" podID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerID="3ab4d8a37a6640fdbef87ecf806b719ae4db92ef4aa4c3f637a9272b668d1ba8" exitCode=137 Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.543660 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerDied","Data":"3ab4d8a37a6640fdbef87ecf806b719ae4db92ef4aa4c3f637a9272b668d1ba8"} Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.595502 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.595878 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-central-agent" containerID="cri-o://a5d62c3842c9a43047429c4ed55efc52c6fee3e7658747fec25160c2bd89cc3a" gracePeriod=30 Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.596468 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="proxy-httpd" containerID="cri-o://b57dc0472552391b3471ba9906bac0d7704efbb94a66e456534a331afbacfb67" gracePeriod=30 Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.596520 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="sg-core" containerID="cri-o://b7812425c7ced758d00e2282c878e846212cd1bb708712d18f9d65e00e83f43c" gracePeriod=30 Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.596558 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-notification-agent" containerID="cri-o://83f0a2be254451d17a9dd1b00b4a7f9898cb8f2c65b2b75d35540819837a6849" gracePeriod=30 Dec 11 09:44:32 crc kubenswrapper[4788]: I1211 09:44:32.670224 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.554662 4788 generic.go:334] "Generic 
(PLEG): container finished" podID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerID="b57dc0472552391b3471ba9906bac0d7704efbb94a66e456534a331afbacfb67" exitCode=0 Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.555123 4788 generic.go:334] "Generic (PLEG): container finished" podID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerID="b7812425c7ced758d00e2282c878e846212cd1bb708712d18f9d65e00e83f43c" exitCode=2 Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.555144 4788 generic.go:334] "Generic (PLEG): container finished" podID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerID="a5d62c3842c9a43047429c4ed55efc52c6fee3e7658747fec25160c2bd89cc3a" exitCode=0 Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.554826 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerDied","Data":"b57dc0472552391b3471ba9906bac0d7704efbb94a66e456534a331afbacfb67"} Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.555200 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerDied","Data":"b7812425c7ced758d00e2282c878e846212cd1bb708712d18f9d65e00e83f43c"} Dec 11 09:44:33 crc kubenswrapper[4788]: I1211 09:44:33.555215 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerDied","Data":"a5d62c3842c9a43047429c4ed55efc52c6fee3e7658747fec25160c2bd89cc3a"} Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.690014 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-84d5c869dd-hzg6f" Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.773572 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.773834 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon-log" containerID="cri-o://72010a1f7a57b545f29717a8fe4161c71457bb11fdbaad6855c991d6da0ab1b2" gracePeriod=30 Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.774353 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" containerID="cri-o://1639642baf74b2716e381bba0e64b2517d8c67a0998e8c81296f78c615d937a7" gracePeriod=30 Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.931090 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": dial tcp 10.217.0.158:9311: connect: connection refused" Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.931205 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:34 crc kubenswrapper[4788]: I1211 09:44:34.931771 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": dial tcp 10.217.0.158:9311: connect: connection refused" Dec 11 09:44:35 
crc kubenswrapper[4788]: I1211 09:44:35.106599 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.149:8776/healthcheck\": dial tcp 10.217.0.149:8776: connect: connection refused" Dec 11 09:44:35 crc kubenswrapper[4788]: I1211 09:44:35.587327 4788 generic.go:334] "Generic (PLEG): container finished" podID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerID="83f0a2be254451d17a9dd1b00b4a7f9898cb8f2c65b2b75d35540819837a6849" exitCode=0 Dec 11 09:44:35 crc kubenswrapper[4788]: I1211 09:44:35.587390 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerDied","Data":"83f0a2be254451d17a9dd1b00b4a7f9898cb8f2c65b2b75d35540819837a6849"} Dec 11 09:44:38 crc kubenswrapper[4788]: I1211 09:44:38.766514 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:38 crc kubenswrapper[4788]: I1211 09:44:38.770139 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-d6d65bd59-g294c" Dec 11 09:44:39 crc kubenswrapper[4788]: I1211 09:44:39.624299 4788 generic.go:334] "Generic (PLEG): container finished" podID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerID="1639642baf74b2716e381bba0e64b2517d8c67a0998e8c81296f78c615d937a7" exitCode=0 Dec 11 09:44:39 crc kubenswrapper[4788]: I1211 09:44:39.624374 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerDied","Data":"1639642baf74b2716e381bba0e64b2517d8c67a0998e8c81296f78c615d937a7"} Dec 11 09:44:39 crc kubenswrapper[4788]: I1211 09:44:39.931181 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": dial tcp 10.217.0.158:9311: connect: connection refused" Dec 11 09:44:39 crc kubenswrapper[4788]: I1211 09:44:39.932948 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8495b6dc44-4jlzn" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.158:9311/healthcheck\": dial tcp 10.217.0.158:9311: connect: connection refused" Dec 11 09:44:39 crc kubenswrapper[4788]: I1211 09:44:39.933175 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:40 crc kubenswrapper[4788]: I1211 09:44:40.106833 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.149:8776/healthcheck\": dial tcp 10.217.0.149:8776: connect: connection refused" Dec 11 09:44:40 crc kubenswrapper[4788]: I1211 09:44:40.405628 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Dec 11 09:44:40 crc kubenswrapper[4788]: I1211 
09:44:40.439416 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:40 crc kubenswrapper[4788]: I1211 09:44:40.440073 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" containerName="kube-state-metrics" containerID="cri-o://b0d88b6b80367a63483f41256f0fdf3fc47fd15107ea5357bcdd042d59785db2" gracePeriod=30 Dec 11 09:44:42 crc kubenswrapper[4788]: E1211 09:44:42.548454 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Dec 11 09:44:42 crc kubenswrapper[4788]: E1211 09:44:42.549387 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5dh64hf7h579h65dh58ch674h9h5b5hd7hd8hb8h655h58bhcch69h65h665h65dh8fhbfh545h7bh68fh574h58dhcdh666h94h64bh577h647q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5bxh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_openstack(4bae27c1-f73d-4bdb-91a2-185dd601bc33): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:44:42 crc kubenswrapper[4788]: E1211 09:44:42.550820 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: 
context canceled\"" pod="openstack/openstackclient" podUID="4bae27c1-f73d-4bdb-91a2-185dd601bc33" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.676043 4788 generic.go:334] "Generic (PLEG): container finished" podID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" containerID="b0d88b6b80367a63483f41256f0fdf3fc47fd15107ea5357bcdd042d59785db2" exitCode=2 Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.676163 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e","Type":"ContainerDied","Data":"b0d88b6b80367a63483f41256f0fdf3fc47fd15107ea5357bcdd042d59785db2"} Dec 11 09:44:42 crc kubenswrapper[4788]: E1211 09:44:42.706437 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="4bae27c1-f73d-4bdb-91a2-185dd601bc33" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.827749 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.924179 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs\") pod \"bc4edfe8-5936-4f7e-892e-49a685d1081b\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.924347 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data\") pod \"bc4edfe8-5936-4f7e-892e-49a685d1081b\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.924473 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjspj\" (UniqueName: \"kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj\") pod \"bc4edfe8-5936-4f7e-892e-49a685d1081b\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.924538 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle\") pod \"bc4edfe8-5936-4f7e-892e-49a685d1081b\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.924566 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom\") pod \"bc4edfe8-5936-4f7e-892e-49a685d1081b\" (UID: \"bc4edfe8-5936-4f7e-892e-49a685d1081b\") " Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.926268 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs" (OuterVolumeSpecName: "logs") pod "bc4edfe8-5936-4f7e-892e-49a685d1081b" (UID: "bc4edfe8-5936-4f7e-892e-49a685d1081b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.931035 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bc4edfe8-5936-4f7e-892e-49a685d1081b" (UID: "bc4edfe8-5936-4f7e-892e-49a685d1081b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.931726 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj" (OuterVolumeSpecName: "kube-api-access-rjspj") pod "bc4edfe8-5936-4f7e-892e-49a685d1081b" (UID: "bc4edfe8-5936-4f7e-892e-49a685d1081b"). InnerVolumeSpecName "kube-api-access-rjspj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.968216 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc4edfe8-5936-4f7e-892e-49a685d1081b" (UID: "bc4edfe8-5936-4f7e-892e-49a685d1081b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:42 crc kubenswrapper[4788]: I1211 09:44:42.997371 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data" (OuterVolumeSpecName: "config-data") pod "bc4edfe8-5936-4f7e-892e-49a685d1081b" (UID: "bc4edfe8-5936-4f7e-892e-49a685d1081b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.026446 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjspj\" (UniqueName: \"kubernetes.io/projected/bc4edfe8-5936-4f7e-892e-49a685d1081b-kube-api-access-rjspj\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.027175 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.027412 4788 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.027516 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc4edfe8-5936-4f7e-892e-49a685d1081b-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.027610 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc4edfe8-5936-4f7e-892e-49a685d1081b-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.031935 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.050340 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.086912 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.230277 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.230540 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.230686 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.230851 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231304 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231499 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231676 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231790 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231873 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.231953 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkhpc\" (UniqueName: 
\"kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232060 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hzdg\" (UniqueName: \"kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg\") pod \"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e\" (UID: \"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232132 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232197 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232270 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232432 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle\") pod \"ac200486-ecb7-44f5-8700-a4a917d4751c\" (UID: \"ac200486-ecb7-44f5-8700-a4a917d4751c\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.232537 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9wb7\" (UniqueName: \"kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7\") pod \"fe6f1eec-5386-445d-8700-eb6af297dc28\" (UID: \"fe6f1eec-5386-445d-8700-eb6af297dc28\") " Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.233348 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.233999 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.235350 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.235709 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs" (OuterVolumeSpecName: "logs") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.235911 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts" (OuterVolumeSpecName: "scripts") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.238180 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7" (OuterVolumeSpecName: "kube-api-access-t9wb7") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "kube-api-access-t9wb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.239940 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts" (OuterVolumeSpecName: "scripts") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.240465 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc" (OuterVolumeSpecName: "kube-api-access-kkhpc") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "kube-api-access-kkhpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.240828 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg" (OuterVolumeSpecName: "kube-api-access-8hzdg") pod "dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" (UID: "dd95dacb-1402-422f-a9c0-f1e8a4b6d01e"). InnerVolumeSpecName "kube-api-access-8hzdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.240985 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.282432 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.303580 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336084 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hzdg\" (UniqueName: \"kubernetes.io/projected/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e-kube-api-access-8hzdg\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336126 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac200486-ecb7-44f5-8700-a4a917d4751c-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336138 4788 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac200486-ecb7-44f5-8700-a4a917d4751c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336146 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336155 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9wb7\" (UniqueName: \"kubernetes.io/projected/fe6f1eec-5386-445d-8700-eb6af297dc28-kube-api-access-t9wb7\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336164 4788 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336172 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336181 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336189 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336197 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe6f1eec-5386-445d-8700-eb6af297dc28-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.336207 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkhpc\" (UniqueName: \"kubernetes.io/projected/ac200486-ecb7-44f5-8700-a4a917d4751c-kube-api-access-kkhpc\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.337559 4788 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data" (OuterVolumeSpecName: "config-data") pod "ac200486-ecb7-44f5-8700-a4a917d4751c" (UID: "ac200486-ecb7-44f5-8700-a4a917d4751c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.416264 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.438118 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac200486-ecb7-44f5-8700-a4a917d4751c-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.438174 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.447015 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data" (OuterVolumeSpecName: "config-data") pod "fe6f1eec-5386-445d-8700-eb6af297dc28" (UID: "fe6f1eec-5386-445d-8700-eb6af297dc28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.539710 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6f1eec-5386-445d-8700-eb6af297dc28-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.685790 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd95dacb-1402-422f-a9c0-f1e8a4b6d01e","Type":"ContainerDied","Data":"a82ca67c80c9e4eb86fb5169b500814bb4167ce70346e321b5ce3606d486261b"} Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.685841 4788 scope.go:117] "RemoveContainer" containerID="b0d88b6b80367a63483f41256f0fdf3fc47fd15107ea5357bcdd042d59785db2" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.685958 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.689267 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe6f1eec-5386-445d-8700-eb6af297dc28","Type":"ContainerDied","Data":"3364c749c08f74544ea90fdc9cc2d4232f817908a9e16f3982d578603b9b5600"} Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.689309 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.691885 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.691901 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ac200486-ecb7-44f5-8700-a4a917d4751c","Type":"ContainerDied","Data":"7d3350dac47aaeb1c1475770e8968514ae9b3f5ebd0fc5783490b6ab9c6e811b"} Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.693770 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8495b6dc44-4jlzn" event={"ID":"bc4edfe8-5936-4f7e-892e-49a685d1081b","Type":"ContainerDied","Data":"91a20a837a2bdec6ca4ea9905969497dbce057cb58e473cbfcbd478ea092b75d"} Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.693894 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8495b6dc44-4jlzn" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.711442 4788 scope.go:117] "RemoveContainer" containerID="b57dc0472552391b3471ba9906bac0d7704efbb94a66e456534a331afbacfb67" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.741765 4788 scope.go:117] "RemoveContainer" containerID="b7812425c7ced758d00e2282c878e846212cd1bb708712d18f9d65e00e83f43c" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.768554 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.784368 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.800470 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.800638 4788 scope.go:117] "RemoveContainer" containerID="83f0a2be254451d17a9dd1b00b4a7f9898cb8f2c65b2b75d35540819837a6849" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814338 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814843 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814866 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814884 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="sg-core" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814893 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="sg-core" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814922 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" containerName="kube-state-metrics" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814929 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" containerName="kube-state-metrics" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814939 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="proxy-httpd" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814945 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" 
containerName="proxy-httpd" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814958 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814965 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814977 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-central-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.814983 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-central-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.814995 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-notification-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815001 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-notification-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.815018 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815024 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" Dec 11 09:44:43 crc kubenswrapper[4788]: E1211 09:44:43.815035 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815041 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815220 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815245 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" containerName="barbican-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815255 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api-log" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815265 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" containerName="kube-state-metrics" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815272 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="proxy-httpd" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815290 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" containerName="cinder-api" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815301 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-central-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815314 4788 
memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="sg-core" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.815324 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" containerName="ceilometer-notification-agent" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.816046 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.829679 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.829886 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.830363 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-wxqsj" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.830678 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.849837 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.856656 4788 scope.go:117] "RemoveContainer" containerID="a5d62c3842c9a43047429c4ed55efc52c6fee3e7658747fec25160c2bd89cc3a" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.863331 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-946jc\" (UniqueName: \"kubernetes.io/projected/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-api-access-946jc\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.863490 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.863527 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.863571 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.870301 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.872850 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.878343 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.878567 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.878816 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.901320 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.914841 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-8495b6dc44-4jlzn"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.930836 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.941958 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.944057 4788 scope.go:117] "RemoveContainer" containerID="3ab4d8a37a6640fdbef87ecf806b719ae4db92ef4aa4c3f637a9272b668d1ba8" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.950113 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966164 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966366 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-946jc\" (UniqueName: \"kubernetes.io/projected/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-api-access-946jc\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966429 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966452 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966472 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966569 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966638 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966670 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966698 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966728 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p69b\" (UniqueName: \"kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966779 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.966814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.972851 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.972898 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.973564 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.974444 4788 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.992696 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.993202 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.994599 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 11 09:44:43 crc kubenswrapper[4788]: I1211 09:44:43.996869 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-946jc\" (UniqueName: \"kubernetes.io/projected/6679f5b3-4784-41d8-8475-fc65b77bb7c7-kube-api-access-946jc\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:43.999933 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6679f5b3-4784-41d8-8475-fc65b77bb7c7-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6679f5b3-4784-41d8-8475-fc65b77bb7c7\") " pod="openstack/kube-state-metrics-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.021688 4788 scope.go:117] "RemoveContainer" containerID="66df2455f6a399de3f08617c1be7b5ece192df9d629fc5aac6ef46336fbb67f1" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.032104 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.074613 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.074730 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.074783 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.074806 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.074880 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.077002 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.077694 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.077757 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p69b\" (UniqueName: \"kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.077814 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.078162 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.081611 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.082667 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.083170 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.083952 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.087768 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.097032 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p69b\" (UniqueName: 
\"kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b\") pod \"ceilometer-0\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.099847 4788 scope.go:117] "RemoveContainer" containerID="96e995953b29cfeb30a0dfecf22b26dfeb84d8c0bf2cd378eeba26e5bf71e2a5" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.122638 4788 scope.go:117] "RemoveContainer" containerID="61982536d201c64530ed5c611d1bf705bb3a0b0a910f23d0e16bdfcd895f175c" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.166153 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180066 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180122 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300a8660-46c4-426b-b4a2-3f713fe639b2-logs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180150 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180171 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kfth\" (UniqueName: \"kubernetes.io/projected/300a8660-46c4-426b-b4a2-3f713fe639b2-kube-api-access-9kfth\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180281 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-scripts\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180306 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180330 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180352 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.180371 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/300a8660-46c4-426b-b4a2-3f713fe639b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.202882 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282029 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282578 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282628 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/300a8660-46c4-426b-b4a2-3f713fe639b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282733 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282781 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300a8660-46c4-426b-b4a2-3f713fe639b2-logs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282806 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.282826 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kfth\" (UniqueName: \"kubernetes.io/projected/300a8660-46c4-426b-b4a2-3f713fe639b2-kube-api-access-9kfth\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.283029 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-scripts\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " 
pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.283061 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.289770 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.291755 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/300a8660-46c4-426b-b4a2-3f713fe639b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.291743 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/300a8660-46c4-426b-b4a2-3f713fe639b2-logs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.292822 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.303123 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-scripts\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.304545 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.306309 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.315804 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/300a8660-46c4-426b-b4a2-3f713fe639b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.329274 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kfth\" (UniqueName: \"kubernetes.io/projected/300a8660-46c4-426b-b4a2-3f713fe639b2-kube-api-access-9kfth\") pod \"cinder-api-0\" (UID: \"300a8660-46c4-426b-b4a2-3f713fe639b2\") " pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 
09:44:44.398674 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.542353 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac200486-ecb7-44f5-8700-a4a917d4751c" path="/var/lib/kubelet/pods/ac200486-ecb7-44f5-8700-a4a917d4751c/volumes" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.543292 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc4edfe8-5936-4f7e-892e-49a685d1081b" path="/var/lib/kubelet/pods/bc4edfe8-5936-4f7e-892e-49a685d1081b/volumes" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.544317 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd95dacb-1402-422f-a9c0-f1e8a4b6d01e" path="/var/lib/kubelet/pods/dd95dacb-1402-422f-a9c0-f1e8a4b6d01e/volumes" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.546463 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe6f1eec-5386-445d-8700-eb6af297dc28" path="/var/lib/kubelet/pods/fe6f1eec-5386-445d-8700-eb6af297dc28/volumes" Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.729952 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:44 crc kubenswrapper[4788]: W1211 09:44:44.812990 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6679f5b3_4784_41d8_8475_fc65b77bb7c7.slice/crio-15d66d6723ed0ee0034ef2a10a81c4b40a690419d16e5e69cdc47112263bc8b9 WatchSource:0}: Error finding container 15d66d6723ed0ee0034ef2a10a81c4b40a690419d16e5e69cdc47112263bc8b9: Status 404 returned error can't find the container with id 15d66d6723ed0ee0034ef2a10a81c4b40a690419d16e5e69cdc47112263bc8b9 Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.831444 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 11 09:44:44 crc kubenswrapper[4788]: W1211 09:44:44.932165 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c2390d5_fa1d_4ce5_8b02_17424a908eff.slice/crio-c8bcb2d634c17ec5f6aa9fdf924f037322bc188ecb3ecac793846609dfbbdf82 WatchSource:0}: Error finding container c8bcb2d634c17ec5f6aa9fdf924f037322bc188ecb3ecac793846609dfbbdf82: Status 404 returned error can't find the container with id c8bcb2d634c17ec5f6aa9fdf924f037322bc188ecb3ecac793846609dfbbdf82 Dec 11 09:44:44 crc kubenswrapper[4788]: I1211 09:44:44.934856 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.061102 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 11 09:44:45 crc kubenswrapper[4788]: W1211 09:44:45.064109 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod300a8660_46c4_426b_b4a2_3f713fe639b2.slice/crio-883dc50a4b67d118b2b9a2206f425f592e203e2ccfbddd9f7eba7b9e4d8905a2 WatchSource:0}: Error finding container 883dc50a4b67d118b2b9a2206f425f592e203e2ccfbddd9f7eba7b9e4d8905a2: Status 404 returned error can't find the container with id 883dc50a4b67d118b2b9a2206f425f592e203e2ccfbddd9f7eba7b9e4d8905a2 Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.751884 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"6679f5b3-4784-41d8-8475-fc65b77bb7c7","Type":"ContainerStarted","Data":"3372410b07c120c66dc557b8f9172c48c550ce18c77b41138e00794ef1a4039c"} Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.752488 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"6679f5b3-4784-41d8-8475-fc65b77bb7c7","Type":"ContainerStarted","Data":"15d66d6723ed0ee0034ef2a10a81c4b40a690419d16e5e69cdc47112263bc8b9"} Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.753443 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.757175 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"300a8660-46c4-426b-b4a2-3f713fe639b2","Type":"ContainerStarted","Data":"883dc50a4b67d118b2b9a2206f425f592e203e2ccfbddd9f7eba7b9e4d8905a2"} Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.763444 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerStarted","Data":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.763498 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerStarted","Data":"c8bcb2d634c17ec5f6aa9fdf924f037322bc188ecb3ecac793846609dfbbdf82"} Dec 11 09:44:45 crc kubenswrapper[4788]: I1211 09:44:45.782796 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.3405773610000002 podStartE2EDuration="2.78276594s" podCreationTimestamp="2025-12-11 09:44:43 +0000 UTC" firstStartedPulling="2025-12-11 09:44:44.825063047 +0000 UTC m=+1414.895842633" lastFinishedPulling="2025-12-11 09:44:45.267251626 +0000 UTC m=+1415.338031212" observedRunningTime="2025-12-11 09:44:45.772719077 +0000 UTC m=+1415.843498683" watchObservedRunningTime="2025-12-11 09:44:45.78276594 +0000 UTC m=+1415.853545536" Dec 11 09:44:46 crc kubenswrapper[4788]: I1211 09:44:46.775905 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"300a8660-46c4-426b-b4a2-3f713fe639b2","Type":"ContainerStarted","Data":"48c5d044ae2e52cd7dd97ae2e53a863625ca13798b3d3df62fcf43a7fe136d62"} Dec 11 09:44:46 crc kubenswrapper[4788]: I1211 09:44:46.776502 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"300a8660-46c4-426b-b4a2-3f713fe639b2","Type":"ContainerStarted","Data":"6c5f0f57f8df006cdc69b4784dba8f82e4919ebd38ccae6e5edf0b42ce7c3d2c"} Dec 11 09:44:46 crc kubenswrapper[4788]: I1211 09:44:46.776524 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 11 09:44:46 crc kubenswrapper[4788]: I1211 09:44:46.779069 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerStarted","Data":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} Dec 11 09:44:46 crc kubenswrapper[4788]: I1211 09:44:46.802633 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.802609547 podStartE2EDuration="3.802609547s" podCreationTimestamp="2025-12-11 09:44:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:46.791826447 +0000 UTC m=+1416.862606043" watchObservedRunningTime="2025-12-11 09:44:46.802609547 +0000 UTC m=+1416.873389133" Dec 11 09:44:47 crc kubenswrapper[4788]: I1211 09:44:47.472090 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:49 crc kubenswrapper[4788]: I1211 09:44:49.809862 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerStarted","Data":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} Dec 11 09:44:50 crc kubenswrapper[4788]: I1211 09:44:50.406017 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.365378 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-564d966fb9-4l95x" Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.439955 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.440628 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-dbdd8b984-2qbs8" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-api" containerID="cri-o://dc13560bdae7328c85ce0f94de2a07998fdc8c63238857fb80c6ba5f8b925e80" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.441155 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-dbdd8b984-2qbs8" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-httpd" containerID="cri-o://40867a8e053a95a8dacb5f0f1681376d343de24bd312723b1e8327515bfe6b4b" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.830886 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerStarted","Data":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.831056 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-central-agent" containerID="cri-o://fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.831110 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.831118 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="proxy-httpd" containerID="cri-o://fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.831187 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-notification-agent" 
containerID="cri-o://2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.831432 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="sg-core" containerID="cri-o://0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" gracePeriod=30 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.845279 4788 generic.go:334] "Generic (PLEG): container finished" podID="b81fec6d-3244-43be-8a39-922194f72574" containerID="40867a8e053a95a8dacb5f0f1681376d343de24bd312723b1e8327515bfe6b4b" exitCode=0 Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.845360 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerDied","Data":"40867a8e053a95a8dacb5f0f1681376d343de24bd312723b1e8327515bfe6b4b"} Dec 11 09:44:51 crc kubenswrapper[4788]: I1211 09:44:51.868544 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.661383589 podStartE2EDuration="8.868520173s" podCreationTimestamp="2025-12-11 09:44:43 +0000 UTC" firstStartedPulling="2025-12-11 09:44:44.935204947 +0000 UTC m=+1415.005984533" lastFinishedPulling="2025-12-11 09:44:51.142341531 +0000 UTC m=+1421.213121117" observedRunningTime="2025-12-11 09:44:51.861428808 +0000 UTC m=+1421.932208404" watchObservedRunningTime="2025-12-11 09:44:51.868520173 +0000 UTC m=+1421.939299759" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.819312 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857343 4788 generic.go:334] "Generic (PLEG): container finished" podID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" exitCode=0 Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857381 4788 generic.go:334] "Generic (PLEG): container finished" podID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" exitCode=2 Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857393 4788 generic.go:334] "Generic (PLEG): container finished" podID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" exitCode=0 Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857383 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerDied","Data":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857437 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857405 4788 generic.go:334] "Generic (PLEG): container finished" podID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" exitCode=0 Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857470 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerDied","Data":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857513 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerDied","Data":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857524 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerDied","Data":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857534 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c2390d5-fa1d-4ce5-8b02-17424a908eff","Type":"ContainerDied","Data":"c8bcb2d634c17ec5f6aa9fdf924f037322bc188ecb3ecac793846609dfbbdf82"} Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.857552 4788 scope.go:117] "RemoveContainer" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.886884 4788 scope.go:117] "RemoveContainer" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922463 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922522 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922582 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922615 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922671 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle\") pod 
\"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922700 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922800 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.922824 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p69b\" (UniqueName: \"kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b\") pod \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\" (UID: \"7c2390d5-fa1d-4ce5-8b02-17424a908eff\") " Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.924082 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.924369 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.928404 4788 scope.go:117] "RemoveContainer" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.932431 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b" (OuterVolumeSpecName: "kube-api-access-5p69b") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "kube-api-access-5p69b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.933205 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts" (OuterVolumeSpecName: "scripts") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.952804 4788 scope.go:117] "RemoveContainer" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.963205 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.986486 4788 scope.go:117] "RemoveContainer" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: E1211 09:44:52.987352 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": container with ID starting with fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8 not found: ID does not exist" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.987388 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} err="failed to get container status \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": rpc error: code = NotFound desc = could not find container \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": container with ID starting with fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.987411 4788 scope.go:117] "RemoveContainer" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: E1211 09:44:52.987729 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": container with ID starting with 0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1 not found: ID does not exist" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.987761 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} err="failed to get container status \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": rpc error: code = NotFound desc = could not find container \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": container with ID starting with 0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.987779 4788 scope.go:117] "RemoveContainer" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: E1211 09:44:52.988848 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": container with ID starting with 2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957 not found: ID does not exist" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.988900 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} err="failed to get container status \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": rpc error: code = NotFound desc = could not find container \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": container with ID starting with 2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.988920 4788 scope.go:117] "RemoveContainer" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: E1211 09:44:52.989322 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": container with ID starting with fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379 not found: ID does not exist" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989347 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} err="failed to get container status \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": rpc error: code = NotFound desc = could not find container \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": container with ID starting with fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989361 4788 scope.go:117] "RemoveContainer" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989574 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} err="failed to get container status \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": rpc error: code = NotFound desc = could not find container \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": container with ID starting with fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989600 4788 scope.go:117] "RemoveContainer" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989906 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} err="failed to get container status \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": rpc error: code = NotFound desc = could not find container \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": container with ID starting with 
0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.989926 4788 scope.go:117] "RemoveContainer" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.990325 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} err="failed to get container status \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": rpc error: code = NotFound desc = could not find container \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": container with ID starting with 2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.990346 4788 scope.go:117] "RemoveContainer" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.990587 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} err="failed to get container status \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": rpc error: code = NotFound desc = could not find container \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": container with ID starting with fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.990609 4788 scope.go:117] "RemoveContainer" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991171 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} err="failed to get container status \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": rpc error: code = NotFound desc = could not find container \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": container with ID starting with fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991200 4788 scope.go:117] "RemoveContainer" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991467 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} err="failed to get container status \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": rpc error: code = NotFound desc = could not find container \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": container with ID starting with 0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991497 4788 scope.go:117] "RemoveContainer" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991769 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} err="failed to get container status \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": rpc error: code = NotFound desc = could not find container \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": container with ID starting with 2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.991797 4788 scope.go:117] "RemoveContainer" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.992019 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} err="failed to get container status \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": rpc error: code = NotFound desc = could not find container \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": container with ID starting with fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.992043 4788 scope.go:117] "RemoveContainer" containerID="fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.992347 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8"} err="failed to get container status \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": rpc error: code = NotFound desc = could not find container \"fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8\": container with ID starting with fb2f53d5d43ae5f49cf7fe847d1f89afec39d030deb0641e1a681e674970fdd8 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.992370 4788 scope.go:117] "RemoveContainer" containerID="0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.993608 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1"} err="failed to get container status \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": rpc error: code = NotFound desc = could not find container \"0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1\": container with ID starting with 0f664dfc363a108b6c6067cba99fd9108dfc9c8e70e2784a1c4bfb93ef0d94f1 not found: ID does not exist" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.993796 4788 scope.go:117] "RemoveContainer" containerID="2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.994212 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957"} err="failed to get container status \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": rpc error: code = NotFound desc = could not find container \"2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957\": container with ID starting with 2735f58afdec5285c2583cbc1492131b4593d2ec75eb09deca51fbb2367bc957 not found: ID does not exist" Dec 
11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.994260 4788 scope.go:117] "RemoveContainer" containerID="fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379" Dec 11 09:44:52 crc kubenswrapper[4788]: I1211 09:44:52.994548 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379"} err="failed to get container status \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": rpc error: code = NotFound desc = could not find container \"fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379\": container with ID starting with fbaa4801461fc73bcbfcafdce013a7254a644f1ba319d007ece80721f4d98379 not found: ID does not exist" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.006258 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027242 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027285 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p69b\" (UniqueName: \"kubernetes.io/projected/7c2390d5-fa1d-4ce5-8b02-17424a908eff-kube-api-access-5p69b\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027300 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027311 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027321 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c2390d5-fa1d-4ce5-8b02-17424a908eff-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.027334 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.043343 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.077138 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data" (OuterVolumeSpecName: "config-data") pod "7c2390d5-fa1d-4ce5-8b02-17424a908eff" (UID: "7c2390d5-fa1d-4ce5-8b02-17424a908eff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.128933 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.128962 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c2390d5-fa1d-4ce5-8b02-17424a908eff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.281472 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.291815 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.311119 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:53 crc kubenswrapper[4788]: E1211 09:44:53.311765 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="proxy-httpd" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.311789 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="proxy-httpd" Dec 11 09:44:53 crc kubenswrapper[4788]: E1211 09:44:53.311816 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="sg-core" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.311823 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="sg-core" Dec 11 09:44:53 crc kubenswrapper[4788]: E1211 09:44:53.311834 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-notification-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.311842 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-notification-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: E1211 09:44:53.311853 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-central-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.311859 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-central-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.312058 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="proxy-httpd" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.312079 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="sg-core" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.312094 4788 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-notification-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.312103 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" containerName="ceilometer-central-agent" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.313793 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.316035 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.316318 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.321301 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.343829 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.433975 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435021 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435092 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435186 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435216 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435264 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfkn8\" (UniqueName: \"kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435318 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.435352 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.536957 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537042 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537097 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfkn8\" (UniqueName: \"kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537171 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537215 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537295 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537337 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.537375 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.538467 4788 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.539245 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.541984 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.543779 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.543970 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.549390 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.557996 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.558287 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfkn8\" (UniqueName: \"kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8\") pod \"ceilometer-0\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.660059 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.894417 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-6h62d"] Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.895934 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:53 crc kubenswrapper[4788]: I1211 09:44:53.919668 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6h62d"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.068541 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.069025 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2qzr\" (UniqueName: \"kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.174387 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.174483 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2qzr\" (UniqueName: \"kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.205790 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-5fjph"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.206537 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.208773 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.212738 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5fjph"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.221142 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9166-account-create-update-xhgcp"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.223912 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.225741 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.233857 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9166-account-create-update-xhgcp"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.245339 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-dj9dl"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.246698 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.249249 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2qzr\" (UniqueName: \"kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr\") pod \"nova-api-db-create-6h62d\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.264757 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.275606 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-dj9dl"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.280594 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.380312 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-12cc-account-create-update-58lfg"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.380963 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts\") pod \"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.381096 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjqvv\" (UniqueName: \"kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv\") pod \"nova-cell0-db-create-5fjph\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.381179 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pttkz\" (UniqueName: \"kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz\") pod \"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.381272 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvggm\" (UniqueName: \"kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc 
kubenswrapper[4788]: I1211 09:44:54.381307 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.382385 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts\") pod \"nova-cell0-db-create-5fjph\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.382440 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.384845 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.407904 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-12cc-account-create-update-58lfg"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.457445 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485242 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvggm\" (UniqueName: \"kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485289 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485323 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485347 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r57hf\" (UniqueName: \"kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485428 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts\") pod \"nova-cell0-db-create-5fjph\" 
(UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485466 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts\") pod \"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485533 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjqvv\" (UniqueName: \"kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv\") pod \"nova-cell0-db-create-5fjph\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.485575 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pttkz\" (UniqueName: \"kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz\") pod \"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.486447 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.486565 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts\") pod \"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.488630 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts\") pod \"nova-cell0-db-create-5fjph\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.532026 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjqvv\" (UniqueName: \"kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv\") pod \"nova-cell0-db-create-5fjph\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.532094 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvggm\" (UniqueName: \"kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm\") pod \"nova-api-9166-account-create-update-xhgcp\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.535907 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pttkz\" (UniqueName: \"kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz\") pod 
\"nova-cell1-db-create-dj9dl\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.545545 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c2390d5-fa1d-4ce5-8b02-17424a908eff" path="/var/lib/kubelet/pods/7c2390d5-fa1d-4ce5-8b02-17424a908eff/volumes" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.589675 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.589728 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r57hf\" (UniqueName: \"kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.592083 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.593296 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-26f5-account-create-update-25256"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.594740 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.600715 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.602347 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.613044 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-26f5-account-create-update-25256"] Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.639598 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r57hf\" (UniqueName: \"kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf\") pod \"nova-cell0-12cc-account-create-update-58lfg\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.691100 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjkjr\" (UniqueName: \"kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.691401 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.731573 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.790579 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.795266 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.795757 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjkjr\" (UniqueName: \"kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.797472 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.818206 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjkjr\" (UniqueName: \"kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr\") pod \"nova-cell1-26f5-account-create-update-25256\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.833196 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.925222 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerStarted","Data":"f9ace9261b5aed307ca8cfd64d764efe1442b75396e010a1ab09a5cbe727c4d7"} Dec 11 09:44:54 crc kubenswrapper[4788]: I1211 09:44:54.945865 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.167672 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6h62d"] Dec 11 09:44:55 crc kubenswrapper[4788]: W1211 09:44:55.210016 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04da6856_cf02_4691_9b29_7715109b1a69.slice/crio-a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d WatchSource:0}: Error finding container a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d: Status 404 returned error can't find the container with id a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.377994 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5fjph"] Dec 11 09:44:55 crc kubenswrapper[4788]: W1211 09:44:55.398650 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4305d44_4730_4a83_b91f_b53bbff433bf.slice/crio-910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d WatchSource:0}: Error finding container 910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d: Status 404 returned error can't find the container with id 910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.673056 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9166-account-create-update-xhgcp"] Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.963906 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5fjph" event={"ID":"c4305d44-4730-4a83-b91f-b53bbff433bf","Type":"ContainerStarted","Data":"d235260cd5be3c632bd85fef26825f1a0c9126dfc4b4edfb754cd66f5a8e40f1"} Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.964287 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5fjph" event={"ID":"c4305d44-4730-4a83-b91f-b53bbff433bf","Type":"ContainerStarted","Data":"910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d"} Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.981528 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9166-account-create-update-xhgcp" event={"ID":"44711a12-aa33-4cf0-a92d-b0039f5ac809","Type":"ContainerStarted","Data":"cd19e537e00c2b68841494e85f78c633039d598039f0b1931b969349f9b025e9"} Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.994356 4788 generic.go:334] "Generic (PLEG): container finished" podID="b81fec6d-3244-43be-8a39-922194f72574" containerID="dc13560bdae7328c85ce0f94de2a07998fdc8c63238857fb80c6ba5f8b925e80" exitCode=0 Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.994457 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerDied","Data":"dc13560bdae7328c85ce0f94de2a07998fdc8c63238857fb80c6ba5f8b925e80"} Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.994571 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-5fjph" podStartSLOduration=1.994548319 podStartE2EDuration="1.994548319s" podCreationTimestamp="2025-12-11 09:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:55.97868963 +0000 UTC m=+1426.049469216" watchObservedRunningTime="2025-12-11 09:44:55.994548319 +0000 UTC m=+1426.065327915" Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.997565 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6h62d" event={"ID":"04da6856-cf02-4691-9b29-7715109b1a69","Type":"ContainerStarted","Data":"a626f043b768ee6c833701a226f907a61d2f8f6f59e57623ca2f9511843cdcc2"} Dec 11 09:44:55 crc kubenswrapper[4788]: I1211 09:44:55.997599 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6h62d" event={"ID":"04da6856-cf02-4691-9b29-7715109b1a69","Type":"ContainerStarted","Data":"a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d"} Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.038219 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-12cc-account-create-update-58lfg"] Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.060036 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-6h62d" podStartSLOduration=3.060010251 podStartE2EDuration="3.060010251s" podCreationTimestamp="2025-12-11 09:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:56.022240643 +0000 UTC m=+1426.093020229" watchObservedRunningTime="2025-12-11 09:44:56.060010251 +0000 UTC m=+1426.130789847" Dec 11 09:44:56 crc kubenswrapper[4788]: W1211 09:44:56.066810 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f1fbc52_492f_4e49_a8d9_b5ab20c0022f.slice/crio-24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b WatchSource:0}: Error finding container 24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b: Status 404 returned error can't find the container with id 24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.079735 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-26f5-account-create-update-25256"] Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.091110 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-dj9dl"] Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.550205 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.592131 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cpt6\" (UniqueName: \"kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6\") pod \"b81fec6d-3244-43be-8a39-922194f72574\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.592368 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config\") pod \"b81fec6d-3244-43be-8a39-922194f72574\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.592460 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs\") pod \"b81fec6d-3244-43be-8a39-922194f72574\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.592482 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config\") pod \"b81fec6d-3244-43be-8a39-922194f72574\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.592522 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle\") pod \"b81fec6d-3244-43be-8a39-922194f72574\" (UID: \"b81fec6d-3244-43be-8a39-922194f72574\") " Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.616843 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b81fec6d-3244-43be-8a39-922194f72574" (UID: "b81fec6d-3244-43be-8a39-922194f72574"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.628642 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6" (OuterVolumeSpecName: "kube-api-access-7cpt6") pod "b81fec6d-3244-43be-8a39-922194f72574" (UID: "b81fec6d-3244-43be-8a39-922194f72574"). InnerVolumeSpecName "kube-api-access-7cpt6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.694446 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cpt6\" (UniqueName: \"kubernetes.io/projected/b81fec6d-3244-43be-8a39-922194f72574-kube-api-access-7cpt6\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.694489 4788 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.776014 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config" (OuterVolumeSpecName: "config") pod "b81fec6d-3244-43be-8a39-922194f72574" (UID: "b81fec6d-3244-43be-8a39-922194f72574"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.785696 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b81fec6d-3244-43be-8a39-922194f72574" (UID: "b81fec6d-3244-43be-8a39-922194f72574"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.796586 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.796618 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.827371 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b81fec6d-3244-43be-8a39-922194f72574" (UID: "b81fec6d-3244-43be-8a39-922194f72574"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:44:56 crc kubenswrapper[4788]: I1211 09:44:56.899062 4788 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b81fec6d-3244-43be-8a39-922194f72574-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.036944 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" event={"ID":"03b3633f-f696-4f45-be36-8cfb754dc8e1","Type":"ContainerStarted","Data":"7348082d43fafb12129c0829726e9c150022000d1c73684ee0c16b9144fdc518"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.036997 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" event={"ID":"03b3633f-f696-4f45-be36-8cfb754dc8e1","Type":"ContainerStarted","Data":"7ce4f923da5c2fc5b22c7a0337336a16bf4bb2cedf41dd8b0de05ca7cfa2daf5"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.045271 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dbdd8b984-2qbs8" event={"ID":"b81fec6d-3244-43be-8a39-922194f72574","Type":"ContainerDied","Data":"e4a3366f02969ec0cdafc0d1d9e1b36b50072a79edd80089aab615344932743d"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.045355 4788 scope.go:117] "RemoveContainer" containerID="40867a8e053a95a8dacb5f0f1681376d343de24bd312723b1e8327515bfe6b4b" Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.045570 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dbdd8b984-2qbs8" Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.057292 4788 generic.go:334] "Generic (PLEG): container finished" podID="04da6856-cf02-4691-9b29-7715109b1a69" containerID="a626f043b768ee6c833701a226f907a61d2f8f6f59e57623ca2f9511843cdcc2" exitCode=0 Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.057364 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6h62d" event={"ID":"04da6856-cf02-4691-9b29-7715109b1a69","Type":"ContainerDied","Data":"a626f043b768ee6c833701a226f907a61d2f8f6f59e57623ca2f9511843cdcc2"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.069470 4788 generic.go:334] "Generic (PLEG): container finished" podID="44c661c8-dd11-4b37-9fbe-eead4f672645" containerID="4613b52a96d4be613e81ea879c29bdf45de16f2be884cd9fd1d1adb658548319" exitCode=0 Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.069546 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-26f5-account-create-update-25256" event={"ID":"44c661c8-dd11-4b37-9fbe-eead4f672645","Type":"ContainerDied","Data":"4613b52a96d4be613e81ea879c29bdf45de16f2be884cd9fd1d1adb658548319"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.069576 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-26f5-account-create-update-25256" event={"ID":"44c661c8-dd11-4b37-9fbe-eead4f672645","Type":"ContainerStarted","Data":"29e0f4536b0fa0b8a6f033b79bfcbe5b7de8b212dfab3f37b43779ff37697a9a"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.071719 4788 generic.go:334] "Generic (PLEG): container finished" podID="0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" containerID="7e85ba6862fe40433404b00293d9e988e3c34dd9089d09be6fed73687447f017" exitCode=0 Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.071806 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dj9dl" 
event={"ID":"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f","Type":"ContainerDied","Data":"7e85ba6862fe40433404b00293d9e988e3c34dd9089d09be6fed73687447f017"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.071838 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dj9dl" event={"ID":"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f","Type":"ContainerStarted","Data":"24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.073800 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerStarted","Data":"ee45f53e198e092a71c6fa8f1366f23e44d58b4c20a96b3808fdf49deacc62e9"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.081541 4788 generic.go:334] "Generic (PLEG): container finished" podID="c4305d44-4730-4a83-b91f-b53bbff433bf" containerID="d235260cd5be3c632bd85fef26825f1a0c9126dfc4b4edfb754cd66f5a8e40f1" exitCode=0 Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.081647 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5fjph" event={"ID":"c4305d44-4730-4a83-b91f-b53bbff433bf","Type":"ContainerDied","Data":"d235260cd5be3c632bd85fef26825f1a0c9126dfc4b4edfb754cd66f5a8e40f1"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.087709 4788 generic.go:334] "Generic (PLEG): container finished" podID="44711a12-aa33-4cf0-a92d-b0039f5ac809" containerID="60de6d6cb9a869f19f150f4fb24f6cf5211f00c18854517bbabd98513c15c10e" exitCode=0 Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.087768 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9166-account-create-update-xhgcp" event={"ID":"44711a12-aa33-4cf0-a92d-b0039f5ac809","Type":"ContainerDied","Data":"60de6d6cb9a869f19f150f4fb24f6cf5211f00c18854517bbabd98513c15c10e"} Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.089772 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" podStartSLOduration=3.089746819 podStartE2EDuration="3.089746819s" podCreationTimestamp="2025-12-11 09:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:44:57.064690556 +0000 UTC m=+1427.135470152" watchObservedRunningTime="2025-12-11 09:44:57.089746819 +0000 UTC m=+1427.160526405" Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.159581 4788 scope.go:117] "RemoveContainer" containerID="dc13560bdae7328c85ce0f94de2a07998fdc8c63238857fb80c6ba5f8b925e80" Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.245462 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:57 crc kubenswrapper[4788]: I1211 09:44:57.256290 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-dbdd8b984-2qbs8"] Dec 11 09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.111554 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerStarted","Data":"b75dd6d552bf03a2a8a7f1c8c4436626377aceccad3ac9dd57446af63d10c6ef"} Dec 11 09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.113648 4788 generic.go:334] "Generic (PLEG): container finished" podID="03b3633f-f696-4f45-be36-8cfb754dc8e1" containerID="7348082d43fafb12129c0829726e9c150022000d1c73684ee0c16b9144fdc518" exitCode=0 Dec 11 
09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.113713 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" event={"ID":"03b3633f-f696-4f45-be36-8cfb754dc8e1","Type":"ContainerDied","Data":"7348082d43fafb12129c0829726e9c150022000d1c73684ee0c16b9144fdc518"} Dec 11 09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.276481 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 11 09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.535497 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b81fec6d-3244-43be-8a39-922194f72574" path="/var/lib/kubelet/pods/b81fec6d-3244-43be-8a39-922194f72574/volumes" Dec 11 09:44:58 crc kubenswrapper[4788]: I1211 09:44:58.979775 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.043727 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.057518 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts\") pod \"44711a12-aa33-4cf0-a92d-b0039f5ac809\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.057882 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjkjr\" (UniqueName: \"kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr\") pod \"44c661c8-dd11-4b37-9fbe-eead4f672645\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.057986 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts\") pod \"44c661c8-dd11-4b37-9fbe-eead4f672645\" (UID: \"44c661c8-dd11-4b37-9fbe-eead4f672645\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.058066 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvggm\" (UniqueName: \"kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm\") pod \"44711a12-aa33-4cf0-a92d-b0039f5ac809\" (UID: \"44711a12-aa33-4cf0-a92d-b0039f5ac809\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.061872 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "44711a12-aa33-4cf0-a92d-b0039f5ac809" (UID: "44711a12-aa33-4cf0-a92d-b0039f5ac809"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.062664 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "44c661c8-dd11-4b37-9fbe-eead4f672645" (UID: "44c661c8-dd11-4b37-9fbe-eead4f672645"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.067950 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm" (OuterVolumeSpecName: "kube-api-access-mvggm") pod "44711a12-aa33-4cf0-a92d-b0039f5ac809" (UID: "44711a12-aa33-4cf0-a92d-b0039f5ac809"). InnerVolumeSpecName "kube-api-access-mvggm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.068082 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr" (OuterVolumeSpecName: "kube-api-access-zjkjr") pod "44c661c8-dd11-4b37-9fbe-eead4f672645" (UID: "44c661c8-dd11-4b37-9fbe-eead4f672645"). InnerVolumeSpecName "kube-api-access-zjkjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.138651 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerStarted","Data":"dd1063423070f2593daadead099b521c57425e56dfc9106781b48bb64b37227b"} Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.140155 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9166-account-create-update-xhgcp" event={"ID":"44711a12-aa33-4cf0-a92d-b0039f5ac809","Type":"ContainerDied","Data":"cd19e537e00c2b68841494e85f78c633039d598039f0b1931b969349f9b025e9"} Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.140179 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd19e537e00c2b68841494e85f78c633039d598039f0b1931b969349f9b025e9" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.140333 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9166-account-create-update-xhgcp" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.161919 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvggm\" (UniqueName: \"kubernetes.io/projected/44711a12-aa33-4cf0-a92d-b0039f5ac809-kube-api-access-mvggm\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.161959 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44711a12-aa33-4cf0-a92d-b0039f5ac809-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.161972 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjkjr\" (UniqueName: \"kubernetes.io/projected/44c661c8-dd11-4b37-9fbe-eead4f672645-kube-api-access-zjkjr\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.161983 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44c661c8-dd11-4b37-9fbe-eead4f672645-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.165190 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-26f5-account-create-update-25256" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.165506 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-26f5-account-create-update-25256" event={"ID":"44c661c8-dd11-4b37-9fbe-eead4f672645","Type":"ContainerDied","Data":"29e0f4536b0fa0b8a6f033b79bfcbe5b7de8b212dfab3f37b43779ff37697a9a"} Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.165564 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29e0f4536b0fa0b8a6f033b79bfcbe5b7de8b212dfab3f37b43779ff37697a9a" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.371735 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.397891 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.400920 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6h62d" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.482940 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pttkz\" (UniqueName: \"kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz\") pod \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.485154 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2qzr\" (UniqueName: \"kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr\") pod \"04da6856-cf02-4691-9b29-7715109b1a69\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.485214 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts\") pod \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\" (UID: \"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.485371 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts\") pod \"c4305d44-4730-4a83-b91f-b53bbff433bf\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.485434 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjqvv\" (UniqueName: \"kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv\") pod \"c4305d44-4730-4a83-b91f-b53bbff433bf\" (UID: \"c4305d44-4730-4a83-b91f-b53bbff433bf\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.485502 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts\") pod \"04da6856-cf02-4691-9b29-7715109b1a69\" (UID: \"04da6856-cf02-4691-9b29-7715109b1a69\") " Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.486098 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" (UID: "0f1fbc52-492f-4e49-a8d9-b5ab20c0022f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.486490 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4305d44-4730-4a83-b91f-b53bbff433bf" (UID: "c4305d44-4730-4a83-b91f-b53bbff433bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.487033 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04da6856-cf02-4691-9b29-7715109b1a69" (UID: "04da6856-cf02-4691-9b29-7715109b1a69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.488256 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04da6856-cf02-4691-9b29-7715109b1a69-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.488285 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.488297 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4305d44-4730-4a83-b91f-b53bbff433bf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.494880 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv" (OuterVolumeSpecName: "kube-api-access-wjqvv") pod "c4305d44-4730-4a83-b91f-b53bbff433bf" (UID: "c4305d44-4730-4a83-b91f-b53bbff433bf"). InnerVolumeSpecName "kube-api-access-wjqvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.496763 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz" (OuterVolumeSpecName: "kube-api-access-pttkz") pod "0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" (UID: "0f1fbc52-492f-4e49-a8d9-b5ab20c0022f"). InnerVolumeSpecName "kube-api-access-pttkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.498143 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr" (OuterVolumeSpecName: "kube-api-access-z2qzr") pod "04da6856-cf02-4691-9b29-7715109b1a69" (UID: "04da6856-cf02-4691-9b29-7715109b1a69"). InnerVolumeSpecName "kube-api-access-z2qzr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.590996 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjqvv\" (UniqueName: \"kubernetes.io/projected/c4305d44-4730-4a83-b91f-b53bbff433bf-kube-api-access-wjqvv\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.591050 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pttkz\" (UniqueName: \"kubernetes.io/projected/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f-kube-api-access-pttkz\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.591065 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2qzr\" (UniqueName: \"kubernetes.io/projected/04da6856-cf02-4691-9b29-7715109b1a69-kube-api-access-z2qzr\") on node \"crc\" DevicePath \"\"" Dec 11 09:44:59 crc kubenswrapper[4788]: I1211 09:44:59.998255 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.106349 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r57hf\" (UniqueName: \"kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf\") pod \"03b3633f-f696-4f45-be36-8cfb754dc8e1\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.106436 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts\") pod \"03b3633f-f696-4f45-be36-8cfb754dc8e1\" (UID: \"03b3633f-f696-4f45-be36-8cfb754dc8e1\") " Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.107424 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "03b3633f-f696-4f45-be36-8cfb754dc8e1" (UID: "03b3633f-f696-4f45-be36-8cfb754dc8e1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.121655 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf" (OuterVolumeSpecName: "kube-api-access-r57hf") pod "03b3633f-f696-4f45-be36-8cfb754dc8e1" (UID: "03b3633f-f696-4f45-be36-8cfb754dc8e1"). InnerVolumeSpecName "kube-api-access-r57hf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.163859 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j"] Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164416 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44711a12-aa33-4cf0-a92d-b0039f5ac809" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164429 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="44711a12-aa33-4cf0-a92d-b0039f5ac809" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164447 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-httpd" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164453 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-httpd" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164469 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04da6856-cf02-4691-9b29-7715109b1a69" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164477 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="04da6856-cf02-4691-9b29-7715109b1a69" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164486 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03b3633f-f696-4f45-be36-8cfb754dc8e1" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164492 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="03b3633f-f696-4f45-be36-8cfb754dc8e1" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164507 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164513 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164533 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44c661c8-dd11-4b37-9fbe-eead4f672645" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164538 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="44c661c8-dd11-4b37-9fbe-eead4f672645" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164551 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4305d44-4730-4a83-b91f-b53bbff433bf" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164556 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4305d44-4730-4a83-b91f-b53bbff433bf" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: E1211 09:45:00.164576 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-api" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164582 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-api" Dec 11 
09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164776 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-httpd" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164793 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="44c661c8-dd11-4b37-9fbe-eead4f672645" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164804 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b81fec6d-3244-43be-8a39-922194f72574" containerName="neutron-api" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164820 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4305d44-4730-4a83-b91f-b53bbff433bf" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164828 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="03b3633f-f696-4f45-be36-8cfb754dc8e1" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164838 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="44711a12-aa33-4cf0-a92d-b0039f5ac809" containerName="mariadb-account-create-update" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164851 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.164864 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="04da6856-cf02-4691-9b29-7715109b1a69" containerName="mariadb-database-create" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.165638 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.169898 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.169970 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.180491 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" event={"ID":"03b3633f-f696-4f45-be36-8cfb754dc8e1","Type":"ContainerDied","Data":"7ce4f923da5c2fc5b22c7a0337336a16bf4bb2cedf41dd8b0de05ca7cfa2daf5"} Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.180546 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ce4f923da5c2fc5b22c7a0337336a16bf4bb2cedf41dd8b0de05ca7cfa2daf5" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.180627 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-12cc-account-create-update-58lfg" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.192170 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6h62d" event={"ID":"04da6856-cf02-4691-9b29-7715109b1a69","Type":"ContainerDied","Data":"a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d"} Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.192206 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6af04cbd88331cd85998c687fe6f4e86fa247061161c9f9e9aa637575af724d" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.192279 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6h62d" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.206914 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4bae27c1-f73d-4bdb-91a2-185dd601bc33","Type":"ContainerStarted","Data":"8960fc57118a10b4eab1503eeeaa438161e5ab5118466d46303d27e71c2a8923"} Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.208998 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdkx4\" (UniqueName: \"kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.210330 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.210567 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.210920 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r57hf\" (UniqueName: \"kubernetes.io/projected/03b3633f-f696-4f45-be36-8cfb754dc8e1-kube-api-access-r57hf\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.211113 4788 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03b3633f-f696-4f45-be36-8cfb754dc8e1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.216117 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j"] Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.223583 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dj9dl" event={"ID":"0f1fbc52-492f-4e49-a8d9-b5ab20c0022f","Type":"ContainerDied","Data":"24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b"} Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.223623 4788 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24c8301435dd9469ab2dcce29f782bea0aa07b553e7ab546eecff0cf81eab75b" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.223705 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dj9dl" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.237870 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5fjph" event={"ID":"c4305d44-4730-4a83-b91f-b53bbff433bf","Type":"ContainerDied","Data":"910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d"} Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.237914 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="910a762278f161ce93c0c00d3fcb7f72dc91672f736e76af5482b54a5c464b0d" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.237997 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5fjph" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.273623 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.329360733 podStartE2EDuration="42.273601492s" podCreationTimestamp="2025-12-11 09:44:18 +0000 UTC" firstStartedPulling="2025-12-11 09:44:19.737129529 +0000 UTC m=+1389.807909115" lastFinishedPulling="2025-12-11 09:44:58.681370288 +0000 UTC m=+1428.752149874" observedRunningTime="2025-12-11 09:45:00.231749369 +0000 UTC m=+1430.302528955" watchObservedRunningTime="2025-12-11 09:45:00.273601492 +0000 UTC m=+1430.344381078" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.312635 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdkx4\" (UniqueName: \"kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.312712 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.312799 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.314525 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.335864 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.342174 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdkx4\" (UniqueName: \"kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4\") pod \"collect-profiles-29424105-kzc6j\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.406214 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7db769bcbd-mnjsv" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.406716 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:45:00 crc kubenswrapper[4788]: I1211 09:45:00.614592 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:01 crc kubenswrapper[4788]: W1211 09:45:01.246189 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51837483_7a31_4aeb_9854_7a317f91fd06.slice/crio-f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c WatchSource:0}: Error finding container f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c: Status 404 returned error can't find the container with id f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c Dec 11 09:45:01 crc kubenswrapper[4788]: I1211 09:45:01.251369 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j"] Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.262849 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerStarted","Data":"f68777ddc92d03b49c7bb215dc0cde1cb1e1a1285848841c5b7fae31fc426ca0"} Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.263004 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.265460 4788 generic.go:334] "Generic (PLEG): container finished" podID="51837483-7a31-4aeb-9854-7a317f91fd06" containerID="09fab16fa9a14e119cf6a9e1f76aa4ec2cfbb312782ff53cc1b4ef24bae91946" exitCode=0 Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.265511 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" event={"ID":"51837483-7a31-4aeb-9854-7a317f91fd06","Type":"ContainerDied","Data":"09fab16fa9a14e119cf6a9e1f76aa4ec2cfbb312782ff53cc1b4ef24bae91946"} Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.265542 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" 
event={"ID":"51837483-7a31-4aeb-9854-7a317f91fd06","Type":"ContainerStarted","Data":"f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c"} Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.295652 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.682695189 podStartE2EDuration="9.295632837s" podCreationTimestamp="2025-12-11 09:44:53 +0000 UTC" firstStartedPulling="2025-12-11 09:44:54.459043563 +0000 UTC m=+1424.529823149" lastFinishedPulling="2025-12-11 09:45:01.071981211 +0000 UTC m=+1431.142760797" observedRunningTime="2025-12-11 09:45:02.282744177 +0000 UTC m=+1432.353523763" watchObservedRunningTime="2025-12-11 09:45:02.295632837 +0000 UTC m=+1432.366412423" Dec 11 09:45:02 crc kubenswrapper[4788]: I1211 09:45:02.440555 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.058619 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.069893 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.081526 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.181448 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.181572 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.181618 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkqhq\" (UniqueName: \"kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.285401 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.285533 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.285574 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rkqhq\" (UniqueName: \"kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.286006 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.286434 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.325541 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkqhq\" (UniqueName: \"kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq\") pod \"redhat-operators-l2r89\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.396946 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.833040 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:03 crc kubenswrapper[4788]: I1211 09:45:03.991402 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:04 crc kubenswrapper[4788]: W1211 09:45:04.004538 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod444902a0_9cad_4a39_8e23_9c56956e80e8.slice/crio-315a7974fd9bf41b2a4bbc61708dde4758af114854536bd2f7d6f280fbf315f8 WatchSource:0}: Error finding container 315a7974fd9bf41b2a4bbc61708dde4758af114854536bd2f7d6f280fbf315f8: Status 404 returned error can't find the container with id 315a7974fd9bf41b2a4bbc61708dde4758af114854536bd2f7d6f280fbf315f8 Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.004780 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume\") pod \"51837483-7a31-4aeb-9854-7a317f91fd06\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.004897 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume\") pod \"51837483-7a31-4aeb-9854-7a317f91fd06\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.005133 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdkx4\" (UniqueName: \"kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4\") pod 
\"51837483-7a31-4aeb-9854-7a317f91fd06\" (UID: \"51837483-7a31-4aeb-9854-7a317f91fd06\") " Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.005841 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume" (OuterVolumeSpecName: "config-volume") pod "51837483-7a31-4aeb-9854-7a317f91fd06" (UID: "51837483-7a31-4aeb-9854-7a317f91fd06"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.006010 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/51837483-7a31-4aeb-9854-7a317f91fd06-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.017896 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4" (OuterVolumeSpecName: "kube-api-access-mdkx4") pod "51837483-7a31-4aeb-9854-7a317f91fd06" (UID: "51837483-7a31-4aeb-9854-7a317f91fd06"). InnerVolumeSpecName "kube-api-access-mdkx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.018466 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "51837483-7a31-4aeb-9854-7a317f91fd06" (UID: "51837483-7a31-4aeb-9854-7a317f91fd06"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.107622 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdkx4\" (UniqueName: \"kubernetes.io/projected/51837483-7a31-4aeb-9854-7a317f91fd06-kube-api-access-mdkx4\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.108082 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/51837483-7a31-4aeb-9854-7a317f91fd06-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.297528 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerStarted","Data":"315a7974fd9bf41b2a4bbc61708dde4758af114854536bd2f7d6f280fbf315f8"} Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.303522 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.303691 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j" event={"ID":"51837483-7a31-4aeb-9854-7a317f91fd06","Type":"ContainerDied","Data":"f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c"} Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.309000 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1924a460a8d1a6683903d080df50fd1f05503fcd6148d2d13dce401781ab48c" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.304363 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="sg-core" containerID="cri-o://dd1063423070f2593daadead099b521c57425e56dfc9106781b48bb64b37227b" gracePeriod=30 Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.303770 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="proxy-httpd" containerID="cri-o://f68777ddc92d03b49c7bb215dc0cde1cb1e1a1285848841c5b7fae31fc426ca0" gracePeriod=30 Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.304389 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-notification-agent" containerID="cri-o://b75dd6d552bf03a2a8a7f1c8c4436626377aceccad3ac9dd57446af63d10c6ef" gracePeriod=30 Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.301220 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-central-agent" containerID="cri-o://ee45f53e198e092a71c6fa8f1366f23e44d58b4c20a96b3808fdf49deacc62e9" gracePeriod=30 Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.996743 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hzdhz"] Dec 11 09:45:04 crc kubenswrapper[4788]: E1211 09:45:04.999327 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51837483-7a31-4aeb-9854-7a317f91fd06" containerName="collect-profiles" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.999352 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="51837483-7a31-4aeb-9854-7a317f91fd06" containerName="collect-profiles" Dec 11 09:45:04 crc kubenswrapper[4788]: I1211 09:45:04.999634 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="51837483-7a31-4aeb-9854-7a317f91fd06" containerName="collect-profiles" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.004332 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.012688 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.012742 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.016724 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hzdhz"] Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.017647 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6f9hq" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.141112 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.141249 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.141295 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgc6n\" (UniqueName: \"kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.141581 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.244958 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.245092 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.245167 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: 
\"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.245205 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgc6n\" (UniqueName: \"kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.252365 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.253052 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.255619 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.274951 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgc6n\" (UniqueName: \"kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n\") pod \"nova-cell0-conductor-db-sync-hzdhz\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.322131 4788 generic.go:334] "Generic (PLEG): container finished" podID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerID="72010a1f7a57b545f29717a8fe4161c71457bb11fdbaad6855c991d6da0ab1b2" exitCode=137 Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.322218 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerDied","Data":"72010a1f7a57b545f29717a8fe4161c71457bb11fdbaad6855c991d6da0ab1b2"} Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326065 4788 generic.go:334] "Generic (PLEG): container finished" podID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerID="f68777ddc92d03b49c7bb215dc0cde1cb1e1a1285848841c5b7fae31fc426ca0" exitCode=0 Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326101 4788 generic.go:334] "Generic (PLEG): container finished" podID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerID="dd1063423070f2593daadead099b521c57425e56dfc9106781b48bb64b37227b" exitCode=2 Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326111 4788 generic.go:334] "Generic (PLEG): container finished" podID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerID="b75dd6d552bf03a2a8a7f1c8c4436626377aceccad3ac9dd57446af63d10c6ef" exitCode=0 Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326159 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerDied","Data":"f68777ddc92d03b49c7bb215dc0cde1cb1e1a1285848841c5b7fae31fc426ca0"} Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326190 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerDied","Data":"dd1063423070f2593daadead099b521c57425e56dfc9106781b48bb64b37227b"} Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.326204 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerDied","Data":"b75dd6d552bf03a2a8a7f1c8c4436626377aceccad3ac9dd57446af63d10c6ef"} Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.328453 4788 generic.go:334] "Generic (PLEG): container finished" podID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerID="9254f98245e1962faea7c66be640930019610f6c34a07af9cd03b4c683e2ba9a" exitCode=0 Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.328489 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerDied","Data":"9254f98245e1962faea7c66be640930019610f6c34a07af9cd03b4c683e2ba9a"} Dec 11 09:45:05 crc kubenswrapper[4788]: I1211 09:45:05.381989 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.100410 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.126630 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.126837 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.126896 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.126920 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6jsx\" (UniqueName: \"kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.127043 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: 
I1211 09:45:06.127074 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.127142 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts\") pod \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\" (UID: \"a9231129-6aaf-4d8e-83fe-cc79ba9d135b\") " Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.128148 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs" (OuterVolumeSpecName: "logs") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.139451 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.139951 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx" (OuterVolumeSpecName: "kube-api-access-l6jsx") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "kube-api-access-l6jsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.170613 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts" (OuterVolumeSpecName: "scripts") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.189664 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.190247 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data" (OuterVolumeSpecName: "config-data") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.228997 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hzdhz"] Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.229420 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.229554 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.229635 4788 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.229709 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.230088 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.230197 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6jsx\" (UniqueName: \"kubernetes.io/projected/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-kube-api-access-l6jsx\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.230067 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "a9231129-6aaf-4d8e-83fe-cc79ba9d135b" (UID: "a9231129-6aaf-4d8e-83fe-cc79ba9d135b"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.332563 4788 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9231129-6aaf-4d8e-83fe-cc79ba9d135b-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.343741 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" event={"ID":"50a543f4-3907-44d1-8dca-7c180fc4dab2","Type":"ContainerStarted","Data":"9949d50d4f9e2351778ad66f1236513c58861ae9c207935d2f121354ec8c740c"} Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.346676 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7db769bcbd-mnjsv" event={"ID":"a9231129-6aaf-4d8e-83fe-cc79ba9d135b","Type":"ContainerDied","Data":"c0b5f233936f440d88cab19ad462ad85eee2d1c786738679f0c023936915636d"} Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.346726 4788 scope.go:117] "RemoveContainer" containerID="1639642baf74b2716e381bba0e64b2517d8c67a0998e8c81296f78c615d937a7" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.346954 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7db769bcbd-mnjsv" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.402292 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.413661 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7db769bcbd-mnjsv"] Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.515277 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" path="/var/lib/kubelet/pods/a9231129-6aaf-4d8e-83fe-cc79ba9d135b/volumes" Dec 11 09:45:06 crc kubenswrapper[4788]: I1211 09:45:06.564422 4788 scope.go:117] "RemoveContainer" containerID="72010a1f7a57b545f29717a8fe4161c71457bb11fdbaad6855c991d6da0ab1b2" Dec 11 09:45:09 crc kubenswrapper[4788]: I1211 09:45:09.420030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerStarted","Data":"0a2fc35ac268f28cca323ef73d1f7ebc83dcf6c8c42aa114ba315444bf8ce463"} Dec 11 09:45:21 crc kubenswrapper[4788]: I1211 09:45:21.538042 4788 generic.go:334] "Generic (PLEG): container finished" podID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerID="0a2fc35ac268f28cca323ef73d1f7ebc83dcf6c8c42aa114ba315444bf8ce463" exitCode=0 Dec 11 09:45:21 crc kubenswrapper[4788]: I1211 09:45:21.538135 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerDied","Data":"0a2fc35ac268f28cca323ef73d1f7ebc83dcf6c8c42aa114ba315444bf8ce463"} Dec 11 09:45:22 crc kubenswrapper[4788]: I1211 09:45:22.551130 4788 generic.go:334] "Generic (PLEG): container finished" podID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerID="ee45f53e198e092a71c6fa8f1366f23e44d58b4c20a96b3808fdf49deacc62e9" exitCode=0 Dec 11 09:45:22 crc kubenswrapper[4788]: I1211 09:45:22.551556 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerDied","Data":"ee45f53e198e092a71c6fa8f1366f23e44d58b4c20a96b3808fdf49deacc62e9"} Dec 11 09:45:23 crc kubenswrapper[4788]: I1211 09:45:23.663039 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.171:3000/\": dial tcp 10.217.0.171:3000: connect: connection refused" Dec 11 09:45:30 crc kubenswrapper[4788]: E1211 09:45:30.888338 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified" Dec 11 09:45:30 crc kubenswrapper[4788]: E1211 09:45:30.889068 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-cell0-conductor-db-sync,Image:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sgc6n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-hzdhz_openstack(50a543f4-3907-44d1-8dca-7c180fc4dab2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 09:45:30 crc kubenswrapper[4788]: E1211 09:45:30.890303 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.048360 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180404 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180534 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180595 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180674 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfkn8\" (UniqueName: \"kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180714 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180787 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180864 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.180926 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle\") pod \"b0952cf3-b2eb-4622-8250-36b9d986f885\" (UID: \"b0952cf3-b2eb-4622-8250-36b9d986f885\") " Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.181031 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.181115 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.181433 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.181454 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0952cf3-b2eb-4622-8250-36b9d986f885-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.186304 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts" (OuterVolumeSpecName: "scripts") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.188997 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8" (OuterVolumeSpecName: "kube-api-access-tfkn8") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "kube-api-access-tfkn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.231120 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.263443 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.283300 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.283560 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.283655 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfkn8\" (UniqueName: \"kubernetes.io/projected/b0952cf3-b2eb-4622-8250-36b9d986f885-kube-api-access-tfkn8\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.284095 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.287518 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.315380 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data" (OuterVolumeSpecName: "config-data") pod "b0952cf3-b2eb-4622-8250-36b9d986f885" (UID: "b0952cf3-b2eb-4622-8250-36b9d986f885"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.386537 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.386859 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0952cf3-b2eb-4622-8250-36b9d986f885-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.634914 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0952cf3-b2eb-4622-8250-36b9d986f885","Type":"ContainerDied","Data":"f9ace9261b5aed307ca8cfd64d764efe1442b75396e010a1ab09a5cbe727c4d7"} Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.634949 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.634986 4788 scope.go:117] "RemoveContainer" containerID="f68777ddc92d03b49c7bb215dc0cde1cb1e1a1285848841c5b7fae31fc426ca0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.638443 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerStarted","Data":"d2e09db09de5f82b80b78f5ff38d8dd375efb032e19b68246100dd55b6826bb1"} Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.641638 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.679893 4788 scope.go:117] "RemoveContainer" containerID="dd1063423070f2593daadead099b521c57425e56dfc9106781b48bb64b37227b" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.689907 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.701299 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711168 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711588 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="proxy-httpd" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711607 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="proxy-httpd" Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711625 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon-log" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711632 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon-log" Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711652 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-central-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711660 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-central-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711679 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="sg-core" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711686 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="sg-core" Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711698 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-notification-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711704 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" 
containerName="ceilometer-notification-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: E1211 09:45:31.711717 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711723 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711908 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon-log" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711922 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-notification-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711932 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9231129-6aaf-4d8e-83fe-cc79ba9d135b" containerName="horizon" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711938 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="ceilometer-central-agent" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711950 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="proxy-httpd" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.711963 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" containerName="sg-core" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.713628 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.714394 4788 scope.go:117] "RemoveContainer" containerID="b75dd6d552bf03a2a8a7f1c8c4436626377aceccad3ac9dd57446af63d10c6ef" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.719339 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.719633 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.719807 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.750657 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.772511 4788 scope.go:117] "RemoveContainer" containerID="ee45f53e198e092a71c6fa8f1366f23e44d58b4c20a96b3808fdf49deacc62e9" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.903761 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.903840 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " 
pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.903902 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.903968 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.903988 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbk9b\" (UniqueName: \"kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.904039 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.904066 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:31 crc kubenswrapper[4788]: I1211 09:45:31.904127 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006619 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006684 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006733 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006786 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006807 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbk9b\" (UniqueName: \"kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.006879 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.007306 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.007589 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.007765 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.011184 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.012087 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.012216 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.012402 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.012773 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.016543 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.033065 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbk9b\" (UniqueName: \"kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b\") pod \"ceilometer-0\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.055182 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.231534 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.232087 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-log" containerID="cri-o://e807a7af6c8f63517b2560bd8a264b070d35c7806daed7f9013aa41ab45b18c2" gracePeriod=30 Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.232432 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-httpd" containerID="cri-o://357596e98a25df108a6cc28d0fe67fe0795d4a64c5ba0956b0bdeec512849786" gracePeriod=30 Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.508028 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0952cf3-b2eb-4622-8250-36b9d986f885" path="/var/lib/kubelet/pods/b0952cf3-b2eb-4622-8250-36b9d986f885/volumes" Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.577592 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:32 crc kubenswrapper[4788]: W1211 09:45:32.579626 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40719465_e3c4_471f_b044_2db1ff369a29.slice/crio-f490026ab79d56d7cf7ae16bea4c6d2d619305be7bc219a8777a0cd967c6b921 WatchSource:0}: Error finding container f490026ab79d56d7cf7ae16bea4c6d2d619305be7bc219a8777a0cd967c6b921: Status 404 returned error can't find the container with id f490026ab79d56d7cf7ae16bea4c6d2d619305be7bc219a8777a0cd967c6b921 Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.650599 4788 generic.go:334] "Generic (PLEG): container finished" podID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerID="e807a7af6c8f63517b2560bd8a264b070d35c7806daed7f9013aa41ab45b18c2" exitCode=143 Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.650687 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerDied","Data":"e807a7af6c8f63517b2560bd8a264b070d35c7806daed7f9013aa41ab45b18c2"} Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.652157 4788 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerStarted","Data":"f490026ab79d56d7cf7ae16bea4c6d2d619305be7bc219a8777a0cd967c6b921"} Dec 11 09:45:32 crc kubenswrapper[4788]: I1211 09:45:32.674037 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l2r89" podStartSLOduration=4.103337279 podStartE2EDuration="29.67401721s" podCreationTimestamp="2025-12-11 09:45:03 +0000 UTC" firstStartedPulling="2025-12-11 09:45:05.33020965 +0000 UTC m=+1435.400989236" lastFinishedPulling="2025-12-11 09:45:30.900889591 +0000 UTC m=+1460.971669167" observedRunningTime="2025-12-11 09:45:32.6714265 +0000 UTC m=+1462.742206086" watchObservedRunningTime="2025-12-11 09:45:32.67401721 +0000 UTC m=+1462.744796796" Dec 11 09:45:33 crc kubenswrapper[4788]: I1211 09:45:33.397881 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:33 crc kubenswrapper[4788]: I1211 09:45:33.398271 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:33 crc kubenswrapper[4788]: I1211 09:45:33.667362 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerStarted","Data":"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0"} Dec 11 09:45:34 crc kubenswrapper[4788]: I1211 09:45:34.464055 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l2r89" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="registry-server" probeResult="failure" output=< Dec 11 09:45:34 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:45:34 crc kubenswrapper[4788]: > Dec 11 09:45:34 crc kubenswrapper[4788]: I1211 09:45:34.658763 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:34 crc kubenswrapper[4788]: I1211 09:45:34.678736 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerStarted","Data":"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2"} Dec 11 09:45:35 crc kubenswrapper[4788]: I1211 09:45:35.690861 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerStarted","Data":"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2"} Dec 11 09:45:35 crc kubenswrapper[4788]: I1211 09:45:35.696789 4788 generic.go:334] "Generic (PLEG): container finished" podID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerID="357596e98a25df108a6cc28d0fe67fe0795d4a64c5ba0956b0bdeec512849786" exitCode=0 Dec 11 09:45:35 crc kubenswrapper[4788]: I1211 09:45:35.696850 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerDied","Data":"357596e98a25df108a6cc28d0fe67fe0795d4a64c5ba0956b0bdeec512849786"} Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.233600 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.252876 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.253187 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4v79\" (UniqueName: \"kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.253484 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.253886 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254008 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254257 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254387 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254482 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle\") pod \"c0cff116-6827-47f2-82d4-0193ebcd2b86\" (UID: \"c0cff116-6827-47f2-82d4-0193ebcd2b86\") " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254632 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs" (OuterVolumeSpecName: "logs") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.254684 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.255483 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.255588 4788 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c0cff116-6827-47f2-82d4-0193ebcd2b86-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.259760 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79" (OuterVolumeSpecName: "kube-api-access-k4v79") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "kube-api-access-k4v79". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.281136 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts" (OuterVolumeSpecName: "scripts") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.288478 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.350553 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.354493 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data" (OuterVolumeSpecName: "config-data") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.361688 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.361747 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4v79\" (UniqueName: \"kubernetes.io/projected/c0cff116-6827-47f2-82d4-0193ebcd2b86-kube-api-access-k4v79\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.361760 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.361772 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.361811 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.401849 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.427159 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c0cff116-6827-47f2-82d4-0193ebcd2b86" (UID: "c0cff116-6827-47f2-82d4-0193ebcd2b86"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.463949 4788 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0cff116-6827-47f2-82d4-0193ebcd2b86-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.464296 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.772419 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerStarted","Data":"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59"} Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.772612 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-central-agent" containerID="cri-o://5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0" gracePeriod=30 Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.773835 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.774303 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="proxy-httpd" containerID="cri-o://93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59" gracePeriod=30 Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.774403 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="sg-core" containerID="cri-o://b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2" gracePeriod=30 Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.774536 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-notification-agent" containerID="cri-o://f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2" gracePeriod=30 Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.780393 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c0cff116-6827-47f2-82d4-0193ebcd2b86","Type":"ContainerDied","Data":"61d85d0ef657bfea851c9078acff9faf980f82fc705418b639f40a85a78136b9"} Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.780528 4788 scope.go:117] "RemoveContainer" containerID="357596e98a25df108a6cc28d0fe67fe0795d4a64c5ba0956b0bdeec512849786" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.780708 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.821187 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.418791599 podStartE2EDuration="7.821167498s" podCreationTimestamp="2025-12-11 09:45:31 +0000 UTC" firstStartedPulling="2025-12-11 09:45:32.582734448 +0000 UTC m=+1462.653514034" lastFinishedPulling="2025-12-11 09:45:37.985110347 +0000 UTC m=+1468.055889933" observedRunningTime="2025-12-11 09:45:38.801099128 +0000 UTC m=+1468.871878734" watchObservedRunningTime="2025-12-11 09:45:38.821167498 +0000 UTC m=+1468.891947084" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.833501 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.847211 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.847613 4788 scope.go:117] "RemoveContainer" containerID="e807a7af6c8f63517b2560bd8a264b070d35c7806daed7f9013aa41ab45b18c2" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.870283 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:38 crc kubenswrapper[4788]: E1211 09:45:38.870695 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-log" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.870712 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-log" Dec 11 09:45:38 crc kubenswrapper[4788]: E1211 09:45:38.870729 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-httpd" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.870735 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-httpd" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.871187 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-httpd" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.871251 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" containerName="glance-log" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.872641 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.876682 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.877013 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 11 09:45:38 crc kubenswrapper[4788]: I1211 09:45:38.884489 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.075271 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-logs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.075814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kz4w\" (UniqueName: \"kubernetes.io/projected/67b7dab2-3849-4249-99b5-63547063f12b-kube-api-access-8kz4w\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.075865 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.075891 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.075919 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.076134 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.076193 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.076309 4788 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178308 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178360 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178397 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178438 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-logs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178519 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kz4w\" (UniqueName: \"kubernetes.io/projected/67b7dab2-3849-4249-99b5-63547063f12b-kube-api-access-8kz4w\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178550 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178571 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178587 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.178884 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.179389 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-logs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.179571 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67b7dab2-3849-4249-99b5-63547063f12b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.188197 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.201468 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.204894 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.208168 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67b7dab2-3849-4249-99b5-63547063f12b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.212107 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kz4w\" (UniqueName: \"kubernetes.io/projected/67b7dab2-3849-4249-99b5-63547063f12b-kube-api-access-8kz4w\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.223348 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"67b7dab2-3849-4249-99b5-63547063f12b\") " pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.507606 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.801897 4788 generic.go:334] "Generic (PLEG): container finished" podID="40719465-e3c4-471f-b044-2db1ff369a29" containerID="93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59" exitCode=0 Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.802278 4788 generic.go:334] "Generic (PLEG): container finished" podID="40719465-e3c4-471f-b044-2db1ff369a29" containerID="b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2" exitCode=2 Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.802294 4788 generic.go:334] "Generic (PLEG): container finished" podID="40719465-e3c4-471f-b044-2db1ff369a29" containerID="f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2" exitCode=0 Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.802333 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerDied","Data":"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59"} Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.802366 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerDied","Data":"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2"} Dec 11 09:45:39 crc kubenswrapper[4788]: I1211 09:45:39.802380 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerDied","Data":"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2"} Dec 11 09:45:40 crc kubenswrapper[4788]: I1211 09:45:40.320485 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 11 09:45:40 crc kubenswrapper[4788]: I1211 09:45:40.517602 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0cff116-6827-47f2-82d4-0193ebcd2b86" path="/var/lib/kubelet/pods/c0cff116-6827-47f2-82d4-0193ebcd2b86/volumes" Dec 11 09:45:40 crc kubenswrapper[4788]: I1211 09:45:40.826190 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67b7dab2-3849-4249-99b5-63547063f12b","Type":"ContainerStarted","Data":"7264eca298a52ba5cebfb7e947229811915c93fae3beb3c2cf9c95dd67f6337a"} Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.621950 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.622285 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-log" containerID="cri-o://9bb68d2a7ef6e2ba858a24a9a2e19ba74c9aa8bc07842ae64aee1fd42f531c21" gracePeriod=30 Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.622435 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-httpd" containerID="cri-o://039dd34b99f4c9129348ce12f05acbc36d6c337d228d567f2c975150e7ca3a60" gracePeriod=30 Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.901344 4788 generic.go:334] "Generic (PLEG): container finished" podID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" 
containerID="9bb68d2a7ef6e2ba858a24a9a2e19ba74c9aa8bc07842ae64aee1fd42f531c21" exitCode=143 Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.901695 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerDied","Data":"9bb68d2a7ef6e2ba858a24a9a2e19ba74c9aa8bc07842ae64aee1fd42f531c21"} Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.905215 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67b7dab2-3849-4249-99b5-63547063f12b","Type":"ContainerStarted","Data":"2981546b3862a974383c72b37ae3dcc4ffc4122f2f3217481dfbc2f143ef10fa"} Dec 11 09:45:41 crc kubenswrapper[4788]: I1211 09:45:41.905277 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67b7dab2-3849-4249-99b5-63547063f12b","Type":"ContainerStarted","Data":"62f8aef6fafd3939b776e13e6383929e1ac55fcd2d9fad2e6a61c0930228a372"} Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.453897 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.491495 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.491467063 podStartE2EDuration="5.491467063s" podCreationTimestamp="2025-12-11 09:45:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:45:41.937496375 +0000 UTC m=+1472.008275981" watchObservedRunningTime="2025-12-11 09:45:43.491467063 +0000 UTC m=+1473.562246649" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.519967 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.527013 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596135 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596646 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596724 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbk9b\" (UniqueName: \"kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596803 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596870 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596899 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596973 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.596996 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs\") pod \"40719465-e3c4-471f-b044-2db1ff369a29\" (UID: \"40719465-e3c4-471f-b044-2db1ff369a29\") " Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.597555 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.597644 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.597769 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.622433 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts" (OuterVolumeSpecName: "scripts") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.630452 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b" (OuterVolumeSpecName: "kube-api-access-tbk9b") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "kube-api-access-tbk9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.682371 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.695634 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.706448 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbk9b\" (UniqueName: \"kubernetes.io/projected/40719465-e3c4-471f-b044-2db1ff369a29-kube-api-access-tbk9b\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.706481 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40719465-e3c4-471f-b044-2db1ff369a29-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.706491 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.706499 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.706508 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.707817 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.760410 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data" (OuterVolumeSpecName: "config-data") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.790354 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40719465-e3c4-471f-b044-2db1ff369a29" (UID: "40719465-e3c4-471f-b044-2db1ff369a29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.808507 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.808550 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40719465-e3c4-471f-b044-2db1ff369a29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.932411 4788 generic.go:334] "Generic (PLEG): container finished" podID="40719465-e3c4-471f-b044-2db1ff369a29" containerID="5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0" exitCode=0 Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.932491 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.932575 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerDied","Data":"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0"} Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.932610 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40719465-e3c4-471f-b044-2db1ff369a29","Type":"ContainerDied","Data":"f490026ab79d56d7cf7ae16bea4c6d2d619305be7bc219a8777a0cd967c6b921"} Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.932637 4788 scope.go:117] "RemoveContainer" containerID="93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.972294 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.980357 4788 scope.go:117] "RemoveContainer" containerID="b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2" Dec 11 09:45:43 crc kubenswrapper[4788]: I1211 09:45:43.981629 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.008631 4788 scope.go:117] "RemoveContainer" containerID="f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.010683 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.011212 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-central-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011307 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-central-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.011331 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-notification-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011339 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-notification-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.011404 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="sg-core" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011414 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="sg-core" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.011436 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="proxy-httpd" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011444 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="proxy-httpd" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011734 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-notification-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011756 4788 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="sg-core" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011808 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="ceilometer-central-agent" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.011827 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="40719465-e3c4-471f-b044-2db1ff369a29" containerName="proxy-httpd" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.014385 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.017351 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.017428 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.017785 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.024124 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.052738 4788 scope.go:117] "RemoveContainer" containerID="5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.105977 4788 scope.go:117] "RemoveContainer" containerID="93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.106715 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59\": container with ID starting with 93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59 not found: ID does not exist" containerID="93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.106803 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59"} err="failed to get container status \"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59\": rpc error: code = NotFound desc = could not find container \"93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59\": container with ID starting with 93ccbbc315ab6b5b7192d2caa81338e357dcebcae22f32d2a945dd275f7a8a59 not found: ID does not exist" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.106903 4788 scope.go:117] "RemoveContainer" containerID="b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.107479 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2\": container with ID starting with b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2 not found: ID does not exist" containerID="b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.107554 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2"} err="failed to get container status \"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2\": rpc error: code = NotFound desc = could not find container \"b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2\": container with ID starting with b2d11f753e8c03ab2f7935795337f872ebdf11a16da21954992d3c9e6dfc61e2 not found: ID does not exist" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.107705 4788 scope.go:117] "RemoveContainer" containerID="f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.108062 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2\": container with ID starting with f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2 not found: ID does not exist" containerID="f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.108150 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2"} err="failed to get container status \"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2\": rpc error: code = NotFound desc = could not find container \"f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2\": container with ID starting with f7edfc91e1d14c4aeac26bf5eeae97de5fba4a98ccf2e928dd92cbcc1e78ade2 not found: ID does not exist" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.108222 4788 scope.go:117] "RemoveContainer" containerID="5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0" Dec 11 09:45:44 crc kubenswrapper[4788]: E1211 09:45:44.108599 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0\": container with ID starting with 5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0 not found: ID does not exist" containerID="5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.108677 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0"} err="failed to get container status \"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0\": rpc error: code = NotFound desc = could not find container \"5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0\": container with ID starting with 5e374e4dec38a141ce531e3c07784e19b598a70c2568f49e7e6e12c09f01f2b0 not found: ID does not exist" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115126 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115182 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115243 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtx7g\" (UniqueName: \"kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115532 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115637 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115759 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.115812 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.116048 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.218877 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.218963 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.218997 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: 
I1211 09:45:44.219039 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.219095 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.219187 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.219214 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.219295 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtx7g\" (UniqueName: \"kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.219415 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.221983 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.228005 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.229348 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.229612 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.229905 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.245191 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.249008 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtx7g\" (UniqueName: \"kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g\") pod \"ceilometer-0\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.390317 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.515049 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40719465-e3c4-471f-b044-2db1ff369a29" path="/var/lib/kubelet/pods/40719465-e3c4-471f-b044-2db1ff369a29/volumes" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.794025 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.159:9292/healthcheck\": read tcp 10.217.0.2:43300->10.217.0.159:9292: read: connection reset by peer" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.794590 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.159:9292/healthcheck\": read tcp 10.217.0.2:43314->10.217.0.159:9292: read: connection reset by peer" Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.864405 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:44 crc kubenswrapper[4788]: W1211 09:45:44.865498 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc08ac5a4_1005_40b0_8953_e6b0e62a384f.slice/crio-710677dbcad528017b1a222ddd3d1e02337d536b6b3b016876c1ed2f3df09f77 WatchSource:0}: Error finding container 710677dbcad528017b1a222ddd3d1e02337d536b6b3b016876c1ed2f3df09f77: Status 404 returned error can't find the container with id 710677dbcad528017b1a222ddd3d1e02337d536b6b3b016876c1ed2f3df09f77 Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.944426 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l2r89" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="registry-server" containerID="cri-o://d2e09db09de5f82b80b78f5ff38d8dd375efb032e19b68246100dd55b6826bb1" gracePeriod=2 Dec 11 09:45:44 crc kubenswrapper[4788]: I1211 09:45:44.944848 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerStarted","Data":"710677dbcad528017b1a222ddd3d1e02337d536b6b3b016876c1ed2f3df09f77"} Dec 11 09:45:45 crc 
kubenswrapper[4788]: I1211 09:45:45.977600 4788 generic.go:334] "Generic (PLEG): container finished" podID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerID="039dd34b99f4c9129348ce12f05acbc36d6c337d228d567f2c975150e7ca3a60" exitCode=0 Dec 11 09:45:45 crc kubenswrapper[4788]: I1211 09:45:45.977649 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerDied","Data":"039dd34b99f4c9129348ce12f05acbc36d6c337d228d567f2c975150e7ca3a60"} Dec 11 09:45:45 crc kubenswrapper[4788]: I1211 09:45:45.990753 4788 generic.go:334] "Generic (PLEG): container finished" podID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerID="d2e09db09de5f82b80b78f5ff38d8dd375efb032e19b68246100dd55b6826bb1" exitCode=0 Dec 11 09:45:45 crc kubenswrapper[4788]: I1211 09:45:45.990801 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerDied","Data":"d2e09db09de5f82b80b78f5ff38d8dd375efb032e19b68246100dd55b6826bb1"} Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.298372 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.329443 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.368704 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369258 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369311 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369340 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9p49k\" (UniqueName: \"kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369417 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369440 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: 
\"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.369482 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.371200 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs" (OuterVolumeSpecName: "logs") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.380405 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts" (OuterVolumeSpecName: "scripts") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.382626 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.398504 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k" (OuterVolumeSpecName: "kube-api-access-9p49k") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "kube-api-access-9p49k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.447251 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.476713 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run\") pod \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\" (UID: \"02366a9b-897c-4132-9981-b1c4a5dc7fc7\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.478828 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.478923 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9p49k\" (UniqueName: \"kubernetes.io/projected/02366a9b-897c-4132-9981-b1c4a5dc7fc7-kube-api-access-9p49k\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.478946 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.478971 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.478983 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.479717 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.480171 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data" (OuterVolumeSpecName: "config-data") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.532807 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.573196 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "02366a9b-897c-4132-9981-b1c4a5dc7fc7" (UID: "02366a9b-897c-4132-9981-b1c4a5dc7fc7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.581286 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.581315 4788 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02366a9b-897c-4132-9981-b1c4a5dc7fc7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.581327 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.581335 4788 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02366a9b-897c-4132-9981-b1c4a5dc7fc7-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.861657 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.989545 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkqhq\" (UniqueName: \"kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq\") pod \"444902a0-9cad-4a39-8e23-9c56956e80e8\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.989625 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content\") pod \"444902a0-9cad-4a39-8e23-9c56956e80e8\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.989668 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities\") pod \"444902a0-9cad-4a39-8e23-9c56956e80e8\" (UID: \"444902a0-9cad-4a39-8e23-9c56956e80e8\") " Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.990600 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities" (OuterVolumeSpecName: "utilities") pod "444902a0-9cad-4a39-8e23-9c56956e80e8" (UID: "444902a0-9cad-4a39-8e23-9c56956e80e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:46 crc kubenswrapper[4788]: I1211 09:45:46.993443 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq" (OuterVolumeSpecName: "kube-api-access-rkqhq") pod "444902a0-9cad-4a39-8e23-9c56956e80e8" (UID: "444902a0-9cad-4a39-8e23-9c56956e80e8"). InnerVolumeSpecName "kube-api-access-rkqhq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.001712 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02366a9b-897c-4132-9981-b1c4a5dc7fc7","Type":"ContainerDied","Data":"8fe1d4c9e9ad7e7842296d89177f1422d780ec5e9ba7ca3fe0c4be0a81352569"} Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.001763 4788 scope.go:117] "RemoveContainer" containerID="039dd34b99f4c9129348ce12f05acbc36d6c337d228d567f2c975150e7ca3a60" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.001884 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.005486 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l2r89" event={"ID":"444902a0-9cad-4a39-8e23-9c56956e80e8","Type":"ContainerDied","Data":"315a7974fd9bf41b2a4bbc61708dde4758af114854536bd2f7d6f280fbf315f8"} Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.005518 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l2r89" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.010345 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerStarted","Data":"9ffc9a1fc0074336cf683dcc90dfe8d93afa6a955dd4eac6dcc478775ade9d81"} Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.031076 4788 scope.go:117] "RemoveContainer" containerID="9bb68d2a7ef6e2ba858a24a9a2e19ba74c9aa8bc07842ae64aee1fd42f531c21" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.047435 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.059877 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.063670 4788 scope.go:117] "RemoveContainer" containerID="d2e09db09de5f82b80b78f5ff38d8dd375efb032e19b68246100dd55b6826bb1" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.069468 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:47 crc kubenswrapper[4788]: E1211 09:45:47.069941 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="extract-content" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.069968 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="extract-content" Dec 11 09:45:47 crc kubenswrapper[4788]: E1211 09:45:47.069986 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-httpd" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.069995 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-httpd" Dec 11 09:45:47 crc kubenswrapper[4788]: E1211 09:45:47.070009 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="registry-server" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070015 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" 
containerName="registry-server" Dec 11 09:45:47 crc kubenswrapper[4788]: E1211 09:45:47.070029 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="extract-utilities" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070035 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="extract-utilities" Dec 11 09:45:47 crc kubenswrapper[4788]: E1211 09:45:47.070063 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-log" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070071 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-log" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070313 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-log" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070331 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" containerName="glance-httpd" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.070348 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" containerName="registry-server" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.072218 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.074891 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.075237 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.121639 4788 scope.go:117] "RemoveContainer" containerID="0a2fc35ac268f28cca323ef73d1f7ebc83dcf6c8c42aa114ba315444bf8ce463" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.126554 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkqhq\" (UniqueName: \"kubernetes.io/projected/444902a0-9cad-4a39-8e23-9c56956e80e8-kube-api-access-rkqhq\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.126598 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.129110 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.150191 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "444902a0-9cad-4a39-8e23-9c56956e80e8" (UID: "444902a0-9cad-4a39-8e23-9c56956e80e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.176406 4788 scope.go:117] "RemoveContainer" containerID="9254f98245e1962faea7c66be640930019610f6c34a07af9cd03b4c683e2ba9a" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.228819 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrbzb\" (UniqueName: \"kubernetes.io/projected/621ba590-fb77-4a71-a559-62c75a7f15dc-kube-api-access-vrbzb\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.229306 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230533 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230618 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-scripts\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230675 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230714 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-config-data\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230728 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-logs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.230940 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.231051 4788 reconciler_common.go:293] 
"Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/444902a0-9cad-4a39-8e23-9c56956e80e8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.332445 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.332788 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-scripts\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.332897 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.332990 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-config-data\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.333081 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-logs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.333247 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.333366 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrbzb\" (UniqueName: \"kubernetes.io/projected/621ba590-fb77-4a71-a559-62c75a7f15dc-kube-api-access-vrbzb\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.333517 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.333700 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: 
\"621ba590-fb77-4a71-a559-62c75a7f15dc\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.341766 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.342651 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/621ba590-fb77-4a71-a559-62c75a7f15dc-logs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.347112 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.355565 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-config-data\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.357965 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-scripts\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.368333 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/621ba590-fb77-4a71-a559-62c75a7f15dc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.383674 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.386485 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrbzb\" (UniqueName: \"kubernetes.io/projected/621ba590-fb77-4a71-a559-62c75a7f15dc-kube-api-access-vrbzb\") pod \"glance-default-external-api-0\" (UID: \"621ba590-fb77-4a71-a559-62c75a7f15dc\") " pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.445081 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.579113 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:47 crc kubenswrapper[4788]: I1211 09:45:47.592201 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l2r89"] Dec 11 09:45:48 crc kubenswrapper[4788]: I1211 09:45:48.024030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerStarted","Data":"af1061705a788674b618e2659608ab37e363c2090b50c19c9e855903db2bef83"} Dec 11 09:45:48 crc kubenswrapper[4788]: I1211 09:45:48.109347 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 11 09:45:48 crc kubenswrapper[4788]: W1211 09:45:48.114040 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod621ba590_fb77_4a71_a559_62c75a7f15dc.slice/crio-307f1b3f5e05c4886958eb594325947168d175298f62670dd0bdc34f0cc50517 WatchSource:0}: Error finding container 307f1b3f5e05c4886958eb594325947168d175298f62670dd0bdc34f0cc50517: Status 404 returned error can't find the container with id 307f1b3f5e05c4886958eb594325947168d175298f62670dd0bdc34f0cc50517 Dec 11 09:45:48 crc kubenswrapper[4788]: I1211 09:45:48.511579 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02366a9b-897c-4132-9981-b1c4a5dc7fc7" path="/var/lib/kubelet/pods/02366a9b-897c-4132-9981-b1c4a5dc7fc7/volumes" Dec 11 09:45:48 crc kubenswrapper[4788]: I1211 09:45:48.512618 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="444902a0-9cad-4a39-8e23-9c56956e80e8" path="/var/lib/kubelet/pods/444902a0-9cad-4a39-8e23-9c56956e80e8/volumes" Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.047352 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerStarted","Data":"6c07e028b66222890417d73fb12538d71eb24cd1d5f32fddc8ee1c1fa27a445c"} Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.051095 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"621ba590-fb77-4a71-a559-62c75a7f15dc","Type":"ContainerStarted","Data":"28cb4f2f1dccf98c4d1c8fb673fd4841d9efeef61923cfa769dd765fb99e7955"} Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.051149 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"621ba590-fb77-4a71-a559-62c75a7f15dc","Type":"ContainerStarted","Data":"307f1b3f5e05c4886958eb594325947168d175298f62670dd0bdc34f0cc50517"} Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.508515 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.508804 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.552427 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:49 crc kubenswrapper[4788]: I1211 09:45:49.557530 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.062534 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" event={"ID":"50a543f4-3907-44d1-8dca-7c180fc4dab2","Type":"ContainerStarted","Data":"b16be0b3a5f035eebf33ca393d64fc4c5a56726c361a09800819d27f44825651"} Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.067409 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"621ba590-fb77-4a71-a559-62c75a7f15dc","Type":"ContainerStarted","Data":"ad8f310857e3cdc0d3f6a88ee151d7a3ca5389b55c524c3fd098dd6757456709"} Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.067484 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.067503 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.083442 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" podStartSLOduration=3.432926466 podStartE2EDuration="46.083417774s" podCreationTimestamp="2025-12-11 09:45:04 +0000 UTC" firstStartedPulling="2025-12-11 09:45:06.226435805 +0000 UTC m=+1436.297215391" lastFinishedPulling="2025-12-11 09:45:48.876927123 +0000 UTC m=+1478.947706699" observedRunningTime="2025-12-11 09:45:50.081636039 +0000 UTC m=+1480.152415645" watchObservedRunningTime="2025-12-11 09:45:50.083417774 +0000 UTC m=+1480.154197360" Dec 11 09:45:50 crc kubenswrapper[4788]: I1211 09:45:50.113427 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.113406606 podStartE2EDuration="3.113406606s" podCreationTimestamp="2025-12-11 09:45:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:45:50.113260303 +0000 UTC m=+1480.184039899" watchObservedRunningTime="2025-12-11 09:45:50.113406606 +0000 UTC m=+1480.184186222" Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.082874 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-central-agent" containerID="cri-o://9ffc9a1fc0074336cf683dcc90dfe8d93afa6a955dd4eac6dcc478775ade9d81" gracePeriod=30 Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.082960 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="sg-core" containerID="cri-o://6c07e028b66222890417d73fb12538d71eb24cd1d5f32fddc8ee1c1fa27a445c" gracePeriod=30 Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.083041 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-notification-agent" containerID="cri-o://af1061705a788674b618e2659608ab37e363c2090b50c19c9e855903db2bef83" gracePeriod=30 Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.083017 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="proxy-httpd" 
containerID="cri-o://4f16cf2e3231143009eb293259291336a001d4facff57a41376e4d5d872d8693" gracePeriod=30 Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.082816 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerStarted","Data":"4f16cf2e3231143009eb293259291336a001d4facff57a41376e4d5d872d8693"} Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.083409 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:45:51 crc kubenswrapper[4788]: I1211 09:45:51.127301 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6183114659999998 podStartE2EDuration="8.127284328s" podCreationTimestamp="2025-12-11 09:45:43 +0000 UTC" firstStartedPulling="2025-12-11 09:45:44.872148582 +0000 UTC m=+1474.942928168" lastFinishedPulling="2025-12-11 09:45:50.381121444 +0000 UTC m=+1480.451901030" observedRunningTime="2025-12-11 09:45:51.124879107 +0000 UTC m=+1481.195658713" watchObservedRunningTime="2025-12-11 09:45:51.127284328 +0000 UTC m=+1481.198063914" Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.098861 4788 generic.go:334] "Generic (PLEG): container finished" podID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerID="4f16cf2e3231143009eb293259291336a001d4facff57a41376e4d5d872d8693" exitCode=0 Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.098908 4788 generic.go:334] "Generic (PLEG): container finished" podID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerID="6c07e028b66222890417d73fb12538d71eb24cd1d5f32fddc8ee1c1fa27a445c" exitCode=2 Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.098919 4788 generic.go:334] "Generic (PLEG): container finished" podID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerID="af1061705a788674b618e2659608ab37e363c2090b50c19c9e855903db2bef83" exitCode=0 Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.098942 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerDied","Data":"4f16cf2e3231143009eb293259291336a001d4facff57a41376e4d5d872d8693"} Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.099011 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerDied","Data":"6c07e028b66222890417d73fb12538d71eb24cd1d5f32fddc8ee1c1fa27a445c"} Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.099030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerDied","Data":"af1061705a788674b618e2659608ab37e363c2090b50c19c9e855903db2bef83"} Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.099016 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.099064 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.775151 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:52 crc kubenswrapper[4788]: I1211 09:45:52.783936 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.155797 4788 
generic.go:334] "Generic (PLEG): container finished" podID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerID="9ffc9a1fc0074336cf683dcc90dfe8d93afa6a955dd4eac6dcc478775ade9d81" exitCode=0 Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.156373 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerDied","Data":"9ffc9a1fc0074336cf683dcc90dfe8d93afa6a955dd4eac6dcc478775ade9d81"} Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.445898 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.445957 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.481666 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.491999 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.816587 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871503 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871559 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871611 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871651 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871743 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871784 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 
09:45:57.871803 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtx7g\" (UniqueName: \"kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.871861 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd\") pod \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\" (UID: \"c08ac5a4-1005-40b0-8953-e6b0e62a384f\") " Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.872882 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.873165 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.880608 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g" (OuterVolumeSpecName: "kube-api-access-qtx7g") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "kube-api-access-qtx7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.886653 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts" (OuterVolumeSpecName: "scripts") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.917592 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.942163 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.966618 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974342 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974383 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974395 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974405 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974415 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974425 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtx7g\" (UniqueName: \"kubernetes.io/projected/c08ac5a4-1005-40b0-8953-e6b0e62a384f-kube-api-access-qtx7g\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.974437 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c08ac5a4-1005-40b0-8953-e6b0e62a384f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:57 crc kubenswrapper[4788]: I1211 09:45:57.989190 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data" (OuterVolumeSpecName: "config-data") pod "c08ac5a4-1005-40b0-8953-e6b0e62a384f" (UID: "c08ac5a4-1005-40b0-8953-e6b0e62a384f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.077205 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c08ac5a4-1005-40b0-8953-e6b0e62a384f-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.170901 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c08ac5a4-1005-40b0-8953-e6b0e62a384f","Type":"ContainerDied","Data":"710677dbcad528017b1a222ddd3d1e02337d536b6b3b016876c1ed2f3df09f77"} Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.171326 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.170933 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.171359 4788 scope.go:117] "RemoveContainer" containerID="4f16cf2e3231143009eb293259291336a001d4facff57a41376e4d5d872d8693" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.171347 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.196496 4788 scope.go:117] "RemoveContainer" containerID="6c07e028b66222890417d73fb12538d71eb24cd1d5f32fddc8ee1c1fa27a445c" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.216408 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.225043 4788 scope.go:117] "RemoveContainer" containerID="af1061705a788674b618e2659608ab37e363c2090b50c19c9e855903db2bef83" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.237661 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.261293 4788 scope.go:117] "RemoveContainer" containerID="9ffc9a1fc0074336cf683dcc90dfe8d93afa6a955dd4eac6dcc478775ade9d81" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.266899 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:58 crc kubenswrapper[4788]: E1211 09:45:58.267814 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-central-agent" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.267922 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-central-agent" Dec 11 09:45:58 crc kubenswrapper[4788]: E1211 09:45:58.268123 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="proxy-httpd" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.268221 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="proxy-httpd" Dec 11 09:45:58 crc kubenswrapper[4788]: E1211 09:45:58.268408 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-notification-agent" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.268486 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-notification-agent" Dec 11 09:45:58 crc 
kubenswrapper[4788]: E1211 09:45:58.268595 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="sg-core" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.268673 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="sg-core" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.269331 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-central-agent" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.269448 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="proxy-httpd" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.269527 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="sg-core" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.269596 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" containerName="ceilometer-notification-agent" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.274590 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.280833 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.281912 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.283355 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.283802 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.487543 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.487748 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59q4h\" (UniqueName: \"kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.487870 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.488349 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 
09:45:58.488501 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.488765 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.488865 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.489030 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.507178 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c08ac5a4-1005-40b0-8953-e6b0e62a384f" path="/var/lib/kubelet/pods/c08ac5a4-1005-40b0-8953-e6b0e62a384f/volumes" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591186 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591303 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591363 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591382 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591442 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591491 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591526 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59q4h\" (UniqueName: \"kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.591555 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.592508 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.592651 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.596719 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.597032 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.597997 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.598128 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.599830 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.616619 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-59q4h\" (UniqueName: \"kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h\") pod \"ceilometer-0\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " pod="openstack/ceilometer-0" Dec 11 09:45:58 crc kubenswrapper[4788]: I1211 09:45:58.902205 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:45:59 crc kubenswrapper[4788]: I1211 09:45:59.415161 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:46:00 crc kubenswrapper[4788]: I1211 09:46:00.197579 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerStarted","Data":"bfdc03adbdd7e75495b3a034b4002f4ce8bc868481ae506af5aa08be3378d7fb"} Dec 11 09:46:00 crc kubenswrapper[4788]: I1211 09:46:00.424158 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 11 09:46:00 crc kubenswrapper[4788]: I1211 09:46:00.424402 4788 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 09:46:00 crc kubenswrapper[4788]: I1211 09:46:00.510944 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 11 09:46:01 crc kubenswrapper[4788]: I1211 09:46:01.240764 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerStarted","Data":"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91"} Dec 11 09:46:02 crc kubenswrapper[4788]: I1211 09:46:02.249812 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerStarted","Data":"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f"} Dec 11 09:46:04 crc kubenswrapper[4788]: I1211 09:46:04.272933 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerStarted","Data":"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4"} Dec 11 09:46:06 crc kubenswrapper[4788]: I1211 09:46:06.299281 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerStarted","Data":"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df"} Dec 11 09:46:06 crc kubenswrapper[4788]: I1211 09:46:06.299991 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:46:07 crc kubenswrapper[4788]: I1211 09:46:07.310359 4788 generic.go:334] "Generic (PLEG): container finished" podID="50a543f4-3907-44d1-8dca-7c180fc4dab2" containerID="b16be0b3a5f035eebf33ca393d64fc4c5a56726c361a09800819d27f44825651" exitCode=0 Dec 11 09:46:07 crc kubenswrapper[4788]: I1211 09:46:07.310456 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" event={"ID":"50a543f4-3907-44d1-8dca-7c180fc4dab2","Type":"ContainerDied","Data":"b16be0b3a5f035eebf33ca393d64fc4c5a56726c361a09800819d27f44825651"} Dec 11 09:46:07 crc kubenswrapper[4788]: I1211 09:46:07.331698 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.300418532 podStartE2EDuration="9.331678182s" podCreationTimestamp="2025-12-11 
09:45:58 +0000 UTC" firstStartedPulling="2025-12-11 09:45:59.429338467 +0000 UTC m=+1489.500118053" lastFinishedPulling="2025-12-11 09:46:05.460598117 +0000 UTC m=+1495.531377703" observedRunningTime="2025-12-11 09:46:06.322009669 +0000 UTC m=+1496.392789255" watchObservedRunningTime="2025-12-11 09:46:07.331678182 +0000 UTC m=+1497.402457768" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.696817 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.810472 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data\") pod \"50a543f4-3907-44d1-8dca-7c180fc4dab2\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.810580 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle\") pod \"50a543f4-3907-44d1-8dca-7c180fc4dab2\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.810712 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts\") pod \"50a543f4-3907-44d1-8dca-7c180fc4dab2\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.810802 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgc6n\" (UniqueName: \"kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n\") pod \"50a543f4-3907-44d1-8dca-7c180fc4dab2\" (UID: \"50a543f4-3907-44d1-8dca-7c180fc4dab2\") " Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.831169 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts" (OuterVolumeSpecName: "scripts") pod "50a543f4-3907-44d1-8dca-7c180fc4dab2" (UID: "50a543f4-3907-44d1-8dca-7c180fc4dab2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.831207 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n" (OuterVolumeSpecName: "kube-api-access-sgc6n") pod "50a543f4-3907-44d1-8dca-7c180fc4dab2" (UID: "50a543f4-3907-44d1-8dca-7c180fc4dab2"). InnerVolumeSpecName "kube-api-access-sgc6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.840573 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50a543f4-3907-44d1-8dca-7c180fc4dab2" (UID: "50a543f4-3907-44d1-8dca-7c180fc4dab2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.844810 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data" (OuterVolumeSpecName: "config-data") pod "50a543f4-3907-44d1-8dca-7c180fc4dab2" (UID: "50a543f4-3907-44d1-8dca-7c180fc4dab2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.912634 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgc6n\" (UniqueName: \"kubernetes.io/projected/50a543f4-3907-44d1-8dca-7c180fc4dab2-kube-api-access-sgc6n\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.912931 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.912941 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:08 crc kubenswrapper[4788]: I1211 09:46:08.912949 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50a543f4-3907-44d1-8dca-7c180fc4dab2-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.337692 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" event={"ID":"50a543f4-3907-44d1-8dca-7c180fc4dab2","Type":"ContainerDied","Data":"9949d50d4f9e2351778ad66f1236513c58861ae9c207935d2f121354ec8c740c"} Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.337741 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9949d50d4f9e2351778ad66f1236513c58861ae9c207935d2f121354ec8c740c" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.337754 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hzdhz" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.602577 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 11 09:46:09 crc kubenswrapper[4788]: E1211 09:46:09.603087 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" containerName="nova-cell0-conductor-db-sync" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.603112 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" containerName="nova-cell0-conductor-db-sync" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.603452 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" containerName="nova-cell0-conductor-db-sync" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.604299 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.606739 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.606886 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6f9hq" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.627357 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.731218 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.731307 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.731553 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnqr4\" (UniqueName: \"kubernetes.io/projected/e0dba180-1bb0-4596-be23-66721a174129-kube-api-access-pnqr4\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.833402 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnqr4\" (UniqueName: \"kubernetes.io/projected/e0dba180-1bb0-4596-be23-66721a174129-kube-api-access-pnqr4\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.833620 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.833655 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.838728 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.841832 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0dba180-1bb0-4596-be23-66721a174129-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.854022 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnqr4\" (UniqueName: \"kubernetes.io/projected/e0dba180-1bb0-4596-be23-66721a174129-kube-api-access-pnqr4\") pod \"nova-cell0-conductor-0\" (UID: \"e0dba180-1bb0-4596-be23-66721a174129\") " pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:09 crc kubenswrapper[4788]: I1211 09:46:09.931390 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:10 crc kubenswrapper[4788]: W1211 09:46:10.409384 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0dba180_1bb0_4596_be23_66721a174129.slice/crio-2c2194d428918e5899527f14613160fcfd8c1bd8df5f6856f94333db18d548a0 WatchSource:0}: Error finding container 2c2194d428918e5899527f14613160fcfd8c1bd8df5f6856f94333db18d548a0: Status 404 returned error can't find the container with id 2c2194d428918e5899527f14613160fcfd8c1bd8df5f6856f94333db18d548a0 Dec 11 09:46:10 crc kubenswrapper[4788]: I1211 09:46:10.418291 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 11 09:46:11 crc kubenswrapper[4788]: I1211 09:46:11.358364 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e0dba180-1bb0-4596-be23-66721a174129","Type":"ContainerStarted","Data":"fe6037f2807a0d76fceb8d9be2c4837e0dab8db66e3083dd52f53ddbc91dfe0d"} Dec 11 09:46:11 crc kubenswrapper[4788]: I1211 09:46:11.358681 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e0dba180-1bb0-4596-be23-66721a174129","Type":"ContainerStarted","Data":"2c2194d428918e5899527f14613160fcfd8c1bd8df5f6856f94333db18d548a0"} Dec 11 09:46:11 crc kubenswrapper[4788]: I1211 09:46:11.360084 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:19 crc kubenswrapper[4788]: I1211 09:46:19.961267 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 11 09:46:19 crc kubenswrapper[4788]: I1211 09:46:19.987543 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=10.987517509 podStartE2EDuration="10.987517509s" podCreationTimestamp="2025-12-11 09:46:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:11.380064398 +0000 UTC m=+1501.450844004" watchObservedRunningTime="2025-12-11 09:46:19.987517509 +0000 UTC m=+1510.058297095" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.428515 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-wx8sg"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.430117 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.433584 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.437673 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.440110 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-wx8sg"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.558408 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtmzh\" (UniqueName: \"kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.558461 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.558799 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.559150 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.661499 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.661588 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtmzh\" (UniqueName: \"kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.661609 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.661701 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.675945 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.685019 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.689002 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.718501 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.725028 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtmzh\" (UniqueName: \"kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh\") pod \"nova-cell0-cell-mapping-wx8sg\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.727420 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.736572 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.755729 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.766849 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.767136 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjq85\" (UniqueName: \"kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.767168 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.767220 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.767663 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.788533 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.805394 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.821678 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.824020 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.869341 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjq85\" (UniqueName: \"kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.869514 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.869631 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7494\" (UniqueName: \"kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.870498 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.870645 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.870718 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.870815 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.873830 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.879275 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:20 crc 
kubenswrapper[4788]: I1211 09:46:20.884344 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.885985 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.888575 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.893608 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.922675 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.924420 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjq85\" (UniqueName: \"kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85\") pod \"nova-metadata-0\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " pod="openstack/nova-metadata-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.973398 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7494\" (UniqueName: \"kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.974072 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvg7l\" (UniqueName: \"kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.974172 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.974285 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.974374 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.974448 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.988550 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.988642 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:46:20 crc kubenswrapper[4788]: I1211 09:46:20.990457 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.006013 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7494\" (UniqueName: \"kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.007816 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.008697 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.034312 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.048135 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.061725 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.074535 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076382 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076439 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvg7l\" (UniqueName: \"kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076458 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpw8h\" (UniqueName: \"kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076500 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076537 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076569 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076617 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076716 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.076740 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " 
pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.090113 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.120332 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.123969 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.128459 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.129869 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvg7l\" (UniqueName: \"kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l\") pod \"nova-scheduler-0\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.142982 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196128 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196194 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpw8h\" (UniqueName: \"kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196350 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196381 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196736 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc 
kubenswrapper[4788]: I1211 09:46:21.196767 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196912 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l789\" (UniqueName: \"kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.196990 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.197115 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.197159 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.197618 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.198433 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.199821 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.205522 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.220397 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.254008 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpw8h\" (UniqueName: \"kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h\") pod \"dnsmasq-dns-bccf8f775-xvscx\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.300626 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.300689 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.300817 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l789\" (UniqueName: \"kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.300861 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.309571 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.309949 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.316888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.341734 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l789\" (UniqueName: \"kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789\") pod \"nova-api-0\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.458704 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.484867 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.970261 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jgd9g"] Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.972796 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.977261 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 11 09:46:21 crc kubenswrapper[4788]: I1211 09:46:21.986910 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.005116 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jgd9g"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.022922 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.022980 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.023024 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppd9m\" (UniqueName: \"kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.023300 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.109144 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:22 crc kubenswrapper[4788]: W1211 09:46:22.109955 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c8f7cd2_ce22_4f74_a099_721f8f7454bd.slice/crio-ab6e081769c0fad972337f7d283c968d95a01d45334103ab5e244de9eae38f33 WatchSource:0}: Error finding container ab6e081769c0fad972337f7d283c968d95a01d45334103ab5e244de9eae38f33: Status 404 returned error can't find the container with id ab6e081769c0fad972337f7d283c968d95a01d45334103ab5e244de9eae38f33 Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.125634 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.125695 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.125803 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppd9m\" (UniqueName: \"kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.125863 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.141578 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.144570 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.152011 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.159265 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.160041 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppd9m\" (UniqueName: \"kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m\") pod \"nova-cell1-conductor-db-sync-jgd9g\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: W1211 09:46:22.181510 4788 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e204962_9911_4a7c_b2da_b4a614f548a6.slice/crio-7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3 WatchSource:0}: Error finding container 7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3: Status 404 returned error can't find the container with id 7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3 Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.200359 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.202604 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.229921 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.247921 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.266260 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-wx8sg"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.314263 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.318561 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.337049 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wjg5\" (UniqueName: \"kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.337104 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.337197 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.343385 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.459431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wjg5\" (UniqueName: \"kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.459500 4788 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.459713 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.460465 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.461590 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.488052 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wjg5\" (UniqueName: \"kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5\") pod \"community-operators-s9tsw\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.538320 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0c8f7cd2-ce22-4f74-a099-721f8f7454bd","Type":"ContainerStarted","Data":"ab6e081769c0fad972337f7d283c968d95a01d45334103ab5e244de9eae38f33"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.569637 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" event={"ID":"107c813a-c386-4ffe-8206-ff0badb52f03","Type":"ContainerStarted","Data":"3a3837d8d987429025086faabad27ffac220fe404e230c3c6e81e6c1eb689761"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.578515 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerStarted","Data":"e263c067a78ef82862a22b8b59f17961abb9876b1d3ab7dbb966ee288d32e634"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.581437 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerStarted","Data":"7410e07fc4bf551432fa1a3edc4d7733f6cd2a41363a77c68556d73ec4db2b37"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.583142 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"653a9065-1ac9-40c5-bb56-c3544cd3e9f6","Type":"ContainerStarted","Data":"66fd348669f4f24fa9ca71c7b4ace97f4a2c2b02e3e0aac161b7266d27266552"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.592696 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-wx8sg" 
event={"ID":"2e204962-9911-4a7c-b2da-b4a614f548a6","Type":"ContainerStarted","Data":"7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3"} Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.615619 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:22 crc kubenswrapper[4788]: I1211 09:46:22.895518 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jgd9g"] Dec 11 09:46:22 crc kubenswrapper[4788]: W1211 09:46:22.922052 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef23e667_905e_41dd_a0ac_9739133fbbf9.slice/crio-6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090 WatchSource:0}: Error finding container 6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090: Status 404 returned error can't find the container with id 6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090 Dec 11 09:46:23 crc kubenswrapper[4788]: I1211 09:46:23.112163 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:23 crc kubenswrapper[4788]: I1211 09:46:23.603596 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerStarted","Data":"4dedd454875eaa5a6091f540a653f0c227f5fbc931eeb966a27a5421ac145648"} Dec 11 09:46:23 crc kubenswrapper[4788]: I1211 09:46:23.605415 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" event={"ID":"ef23e667-905e-41dd-a0ac-9739133fbbf9","Type":"ContainerStarted","Data":"6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090"} Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.639915 4788 generic.go:334] "Generic (PLEG): container finished" podID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerID="8825ffd27731a6f93be177576e584b31a473128aab27b41ef42940a29c5d1d00" exitCode=0 Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.640962 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerDied","Data":"8825ffd27731a6f93be177576e584b31a473128aab27b41ef42940a29c5d1d00"} Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.654806 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-wx8sg" event={"ID":"2e204962-9911-4a7c-b2da-b4a614f548a6","Type":"ContainerStarted","Data":"140fe4839241daf0c592246ae12ef1c55791f6e7c12445313a4b142c69fa8a0d"} Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.670740 4788 generic.go:334] "Generic (PLEG): container finished" podID="107c813a-c386-4ffe-8206-ff0badb52f03" containerID="0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08" exitCode=0 Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.670831 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" event={"ID":"107c813a-c386-4ffe-8206-ff0badb52f03","Type":"ContainerDied","Data":"0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08"} Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.677189 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" 
event={"ID":"ef23e667-905e-41dd-a0ac-9739133fbbf9","Type":"ContainerStarted","Data":"3798fe725fc33adf8133e9ae4262c73f9178e094151000712e63f33c72d1add7"} Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.706067 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-wx8sg" podStartSLOduration=4.706050301 podStartE2EDuration="4.706050301s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:24.688638827 +0000 UTC m=+1514.759418413" watchObservedRunningTime="2025-12-11 09:46:24.706050301 +0000 UTC m=+1514.776829887" Dec 11 09:46:24 crc kubenswrapper[4788]: I1211 09:46:24.763789 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" podStartSLOduration=3.763765373 podStartE2EDuration="3.763765373s" podCreationTimestamp="2025-12-11 09:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:24.754809044 +0000 UTC m=+1514.825588630" watchObservedRunningTime="2025-12-11 09:46:24.763765373 +0000 UTC m=+1514.834544959" Dec 11 09:46:25 crc kubenswrapper[4788]: I1211 09:46:25.204887 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:46:25 crc kubenswrapper[4788]: I1211 09:46:25.224081 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:25 crc kubenswrapper[4788]: I1211 09:46:25.699495 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" event={"ID":"107c813a-c386-4ffe-8206-ff0badb52f03","Type":"ContainerStarted","Data":"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3"} Dec 11 09:46:25 crc kubenswrapper[4788]: I1211 09:46:25.700900 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:25 crc kubenswrapper[4788]: I1211 09:46:25.729823 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" podStartSLOduration=5.729806973 podStartE2EDuration="5.729806973s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:25.728829368 +0000 UTC m=+1515.799608954" watchObservedRunningTime="2025-12-11 09:46:25.729806973 +0000 UTC m=+1515.800586549" Dec 11 09:46:29 crc kubenswrapper[4788]: I1211 09:46:29.069646 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.747329 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerStarted","Data":"0642ec94368029f4565ee2fc0f735622bd3d6f1ac8454bf6ef854d5ebca8b7a0"} Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.750146 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerDied","Data":"fbc83cf311eb5850071217edb3e62ec40ff13f4740e7fb462d152b8bc672110d"} Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.750423 4788 generic.go:334] "Generic (PLEG): 
container finished" podID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerID="fbc83cf311eb5850071217edb3e62ec40ff13f4740e7fb462d152b8bc672110d" exitCode=0 Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.752412 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"653a9065-1ac9-40c5-bb56-c3544cd3e9f6","Type":"ContainerStarted","Data":"d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098"} Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.754345 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0c8f7cd2-ce22-4f74-a099-721f8f7454bd","Type":"ContainerStarted","Data":"c93beaa41fbfde835d3ef49837a5f892666833bd81f6ff969d4feb33d54ff069"} Dec 11 09:46:30 crc kubenswrapper[4788]: I1211 09:46:30.755885 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerStarted","Data":"63e12444150868d695bea8d6b3467f09362e572836aee836f6b4cf118f81f17c"} Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.461513 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.545519 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.546419 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="dnsmasq-dns" containerID="cri-o://217b9afd30618cc7171651fa083b826f22e3984e8c4ceace67b6cc157748535e" gracePeriod=10 Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.771986 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c93beaa41fbfde835d3ef49837a5f892666833bd81f6ff969d4feb33d54ff069" gracePeriod=30 Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.803345 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=4.459761022 podStartE2EDuration="11.80332932s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="2025-12-11 09:46:22.119180618 +0000 UTC m=+1512.189960204" lastFinishedPulling="2025-12-11 09:46:29.462748916 +0000 UTC m=+1519.533528502" observedRunningTime="2025-12-11 09:46:31.797171103 +0000 UTC m=+1521.867950689" watchObservedRunningTime="2025-12-11 09:46:31.80332932 +0000 UTC m=+1521.874108906" Dec 11 09:46:31 crc kubenswrapper[4788]: I1211 09:46:31.824170 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=4.480921811 podStartE2EDuration="11.824148231s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="2025-12-11 09:46:22.119436914 +0000 UTC m=+1512.190216500" lastFinishedPulling="2025-12-11 09:46:29.462663324 +0000 UTC m=+1519.533442920" observedRunningTime="2025-12-11 09:46:31.815823019 +0000 UTC m=+1521.886602615" watchObservedRunningTime="2025-12-11 09:46:31.824148231 +0000 UTC m=+1521.894927827" Dec 11 09:46:32 crc kubenswrapper[4788]: I1211 09:46:32.291245 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" 
podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.162:5353: connect: connection refused" Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.797714 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerStarted","Data":"4a6c2ab72339907e3d69096166d4350c56c8ba65cb2c644c85acbab83584cde2"} Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.803201 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerStarted","Data":"942cd2b555a84fe99a4e5fbd9c8a9ade9685fe8989cd118a97a14d703192204a"} Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.806807 4788 generic.go:334] "Generic (PLEG): container finished" podID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerID="217b9afd30618cc7171651fa083b826f22e3984e8c4ceace67b6cc157748535e" exitCode=0 Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.806884 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" event={"ID":"1e46d499-ff77-4620-8c50-dcee4ac3af39","Type":"ContainerDied","Data":"217b9afd30618cc7171651fa083b826f22e3984e8c4ceace67b6cc157748535e"} Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.809788 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerStarted","Data":"c7cb06b3dac817f24036a7a23a39bec203ea23133df8e322f18790a617a22ecd"} Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.809987 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-log" containerID="cri-o://63e12444150868d695bea8d6b3467f09362e572836aee836f6b4cf118f81f17c" gracePeriod=30 Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.810490 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-metadata" containerID="cri-o://c7cb06b3dac817f24036a7a23a39bec203ea23133df8e322f18790a617a22ecd" gracePeriod=30 Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.836041 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=6.748783513 podStartE2EDuration="13.836021006s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="2025-12-11 09:46:22.375510003 +0000 UTC m=+1512.446289589" lastFinishedPulling="2025-12-11 09:46:29.462747496 +0000 UTC m=+1519.533527082" observedRunningTime="2025-12-11 09:46:33.81934466 +0000 UTC m=+1523.890124246" watchObservedRunningTime="2025-12-11 09:46:33.836021006 +0000 UTC m=+1523.906800592" Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.860677 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=6.668219299 podStartE2EDuration="13.860651684s" podCreationTimestamp="2025-12-11 09:46:20 +0000 UTC" firstStartedPulling="2025-12-11 09:46:22.109599614 +0000 UTC m=+1512.180379190" lastFinishedPulling="2025-12-11 09:46:29.302031999 +0000 UTC m=+1519.372811575" observedRunningTime="2025-12-11 09:46:33.853921092 +0000 UTC m=+1523.924700688" watchObservedRunningTime="2025-12-11 09:46:33.860651684 
+0000 UTC m=+1523.931431270" Dec 11 09:46:33 crc kubenswrapper[4788]: I1211 09:46:33.892069 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s9tsw" podStartSLOduration=5.353365086 podStartE2EDuration="11.892047604s" podCreationTimestamp="2025-12-11 09:46:22 +0000 UTC" firstStartedPulling="2025-12-11 09:46:24.644482051 +0000 UTC m=+1514.715261627" lastFinishedPulling="2025-12-11 09:46:31.183164559 +0000 UTC m=+1521.253944145" observedRunningTime="2025-12-11 09:46:33.880595182 +0000 UTC m=+1523.951374758" watchObservedRunningTime="2025-12-11 09:46:33.892047604 +0000 UTC m=+1523.962827190" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.183448 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.190353 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsv7c\" (UniqueName: \"kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.190415 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.214831 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c" (OuterVolumeSpecName: "kube-api-access-xsv7c") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "kube-api-access-xsv7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.295373 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.295436 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.295541 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.295572 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.296081 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsv7c\" (UniqueName: \"kubernetes.io/projected/1e46d499-ff77-4620-8c50-dcee4ac3af39-kube-api-access-xsv7c\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.436103 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config" (OuterVolumeSpecName: "config") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.446248 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.473900 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.474386 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.500054 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.500323 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") pod \"1e46d499-ff77-4620-8c50-dcee4ac3af39\" (UID: \"1e46d499-ff77-4620-8c50-dcee4ac3af39\") " Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501379 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: W1211 09:46:34.501497 4788 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/1e46d499-ff77-4620-8c50-dcee4ac3af39/volumes/kubernetes.io~configmap/ovsdbserver-nb Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501517 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e46d499-ff77-4620-8c50-dcee4ac3af39" (UID: "1e46d499-ff77-4620-8c50-dcee4ac3af39"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501707 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501729 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501740 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.501753 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e46d499-ff77-4620-8c50-dcee4ac3af39-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.831511 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" event={"ID":"1e46d499-ff77-4620-8c50-dcee4ac3af39","Type":"ContainerDied","Data":"494388ea419506afceb3e270afb35680db8a365419d952455401409d2c9a3a8b"} Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.831863 4788 scope.go:117] "RemoveContainer" containerID="217b9afd30618cc7171651fa083b826f22e3984e8c4ceace67b6cc157748535e" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.832043 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-dwrpb" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.842307 4788 generic.go:334] "Generic (PLEG): container finished" podID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerID="c7cb06b3dac817f24036a7a23a39bec203ea23133df8e322f18790a617a22ecd" exitCode=0 Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.842350 4788 generic.go:334] "Generic (PLEG): container finished" podID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerID="63e12444150868d695bea8d6b3467f09362e572836aee836f6b4cf118f81f17c" exitCode=143 Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.842425 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerDied","Data":"c7cb06b3dac817f24036a7a23a39bec203ea23133df8e322f18790a617a22ecd"} Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.842479 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerDied","Data":"63e12444150868d695bea8d6b3467f09362e572836aee836f6b4cf118f81f17c"} Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.865394 4788 scope.go:117] "RemoveContainer" containerID="654a738631f1e4467114a63eb810993afd615b4365eedef1eb14daaafe4b4de0" Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.889309 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.909802 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-dwrpb"] Dec 11 09:46:34 crc kubenswrapper[4788]: I1211 09:46:34.930440 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.119418 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs\") pod \"9dde1c59-16f6-450d-9876-34611ef7ffd7\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.119770 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle\") pod \"9dde1c59-16f6-450d-9876-34611ef7ffd7\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.119984 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data\") pod \"9dde1c59-16f6-450d-9876-34611ef7ffd7\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.120163 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjq85\" (UniqueName: \"kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85\") pod \"9dde1c59-16f6-450d-9876-34611ef7ffd7\" (UID: \"9dde1c59-16f6-450d-9876-34611ef7ffd7\") " Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.120149 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs" (OuterVolumeSpecName: "logs") pod "9dde1c59-16f6-450d-9876-34611ef7ffd7" (UID: "9dde1c59-16f6-450d-9876-34611ef7ffd7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.120979 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dde1c59-16f6-450d-9876-34611ef7ffd7-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.128423 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85" (OuterVolumeSpecName: "kube-api-access-xjq85") pod "9dde1c59-16f6-450d-9876-34611ef7ffd7" (UID: "9dde1c59-16f6-450d-9876-34611ef7ffd7"). InnerVolumeSpecName "kube-api-access-xjq85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.148738 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data" (OuterVolumeSpecName: "config-data") pod "9dde1c59-16f6-450d-9876-34611ef7ffd7" (UID: "9dde1c59-16f6-450d-9876-34611ef7ffd7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.152495 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dde1c59-16f6-450d-9876-34611ef7ffd7" (UID: "9dde1c59-16f6-450d-9876-34611ef7ffd7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.223755 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.223793 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dde1c59-16f6-450d-9876-34611ef7ffd7-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.223808 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjq85\" (UniqueName: \"kubernetes.io/projected/9dde1c59-16f6-450d-9876-34611ef7ffd7-kube-api-access-xjq85\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.901509 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9dde1c59-16f6-450d-9876-34611ef7ffd7","Type":"ContainerDied","Data":"e263c067a78ef82862a22b8b59f17961abb9876b1d3ab7dbb966ee288d32e634"} Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.901580 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.901859 4788 scope.go:117] "RemoveContainer" containerID="c7cb06b3dac817f24036a7a23a39bec203ea23133df8e322f18790a617a22ecd" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.940696 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.944098 4788 scope.go:117] "RemoveContainer" containerID="63e12444150868d695bea8d6b3467f09362e572836aee836f6b4cf118f81f17c" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.954396 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.981595 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:35 crc kubenswrapper[4788]: E1211 09:46:35.982295 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="dnsmasq-dns" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982325 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="dnsmasq-dns" Dec 11 09:46:35 crc kubenswrapper[4788]: E1211 09:46:35.982344 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="init" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982354 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="init" Dec 11 09:46:35 crc kubenswrapper[4788]: E1211 09:46:35.982394 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-metadata" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982404 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-metadata" Dec 11 09:46:35 crc kubenswrapper[4788]: E1211 09:46:35.982448 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-log" Dec 11 09:46:35 crc 
kubenswrapper[4788]: I1211 09:46:35.982458 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-log" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982754 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-metadata" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982778 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" containerName="dnsmasq-dns" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.982796 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" containerName="nova-metadata-log" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.984464 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.989453 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 11 09:46:35 crc kubenswrapper[4788]: I1211 09:46:35.989753 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.008958 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.091205 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143401 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvgps\" (UniqueName: \"kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143479 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143504 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143558 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143579 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.143610 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.245412 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.245475 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.246005 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.245649 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvgps\" (UniqueName: \"kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.246374 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.246429 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.252002 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.252142 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.262548 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.264520 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hvgps\" (UniqueName: \"kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps\") pod \"nova-metadata-0\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.305019 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.639123 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e46d499-ff77-4620-8c50-dcee4ac3af39" path="/var/lib/kubelet/pods/1e46d499-ff77-4620-8c50-dcee4ac3af39/volumes" Dec 11 09:46:36 crc kubenswrapper[4788]: I1211 09:46:36.640588 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dde1c59-16f6-450d-9876-34611ef7ffd7" path="/var/lib/kubelet/pods/9dde1c59-16f6-450d-9876-34611ef7ffd7/volumes" Dec 11 09:46:37 crc kubenswrapper[4788]: I1211 09:46:37.068050 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:37 crc kubenswrapper[4788]: I1211 09:46:37.923837 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerStarted","Data":"ca22a7c1a07d2e41b31d9f14198bb1b77f09ed07d3527c2ef14bad3f88986cd6"} Dec 11 09:46:38 crc kubenswrapper[4788]: I1211 09:46:38.941691 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerStarted","Data":"d0c8caedb487596775876cddc1a8d5ffd8713f4c87792d9bbd2e97b65e7e3f7e"} Dec 11 09:46:38 crc kubenswrapper[4788]: I1211 09:46:38.947835 4788 generic.go:334] "Generic (PLEG): container finished" podID="2e204962-9911-4a7c-b2da-b4a614f548a6" containerID="140fe4839241daf0c592246ae12ef1c55791f6e7c12445313a4b142c69fa8a0d" exitCode=0 Dec 11 09:46:38 crc kubenswrapper[4788]: I1211 09:46:38.947877 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-wx8sg" event={"ID":"2e204962-9911-4a7c-b2da-b4a614f548a6","Type":"ContainerDied","Data":"140fe4839241daf0c592246ae12ef1c55791f6e7c12445313a4b142c69fa8a0d"} Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.328880 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.452489 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle\") pod \"2e204962-9911-4a7c-b2da-b4a614f548a6\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.452560 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts\") pod \"2e204962-9911-4a7c-b2da-b4a614f548a6\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.452710 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtmzh\" (UniqueName: \"kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh\") pod \"2e204962-9911-4a7c-b2da-b4a614f548a6\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.452745 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data\") pod \"2e204962-9911-4a7c-b2da-b4a614f548a6\" (UID: \"2e204962-9911-4a7c-b2da-b4a614f548a6\") " Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.459297 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh" (OuterVolumeSpecName: "kube-api-access-qtmzh") pod "2e204962-9911-4a7c-b2da-b4a614f548a6" (UID: "2e204962-9911-4a7c-b2da-b4a614f548a6"). InnerVolumeSpecName "kube-api-access-qtmzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.470082 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts" (OuterVolumeSpecName: "scripts") pod "2e204962-9911-4a7c-b2da-b4a614f548a6" (UID: "2e204962-9911-4a7c-b2da-b4a614f548a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.484915 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data" (OuterVolumeSpecName: "config-data") pod "2e204962-9911-4a7c-b2da-b4a614f548a6" (UID: "2e204962-9911-4a7c-b2da-b4a614f548a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.491057 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e204962-9911-4a7c-b2da-b4a614f548a6" (UID: "2e204962-9911-4a7c-b2da-b4a614f548a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.556032 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.556467 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.556492 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtmzh\" (UniqueName: \"kubernetes.io/projected/2e204962-9911-4a7c-b2da-b4a614f548a6-kube-api-access-qtmzh\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.556504 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e204962-9911-4a7c-b2da-b4a614f548a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.968278 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-wx8sg" event={"ID":"2e204962-9911-4a7c-b2da-b4a614f548a6","Type":"ContainerDied","Data":"7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3"} Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.968630 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e2c153c07608325975cf2e746dbdceb7b9d4c98ae0f38d58e3b40f6f1aa98a3" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.968323 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-wx8sg" Dec 11 09:46:40 crc kubenswrapper[4788]: I1211 09:46:40.970299 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerStarted","Data":"858ce7a72b7d7b4371acc8a14e419a76c33548a03b5cb1317078f2c680317668"} Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.011033 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=6.011014476 podStartE2EDuration="6.011014476s" podCreationTimestamp="2025-12-11 09:46:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:40.994060224 +0000 UTC m=+1531.064839810" watchObservedRunningTime="2025-12-11 09:46:41.011014476 +0000 UTC m=+1531.081794062" Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.143658 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.149422 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.149898 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-log" containerID="cri-o://0642ec94368029f4565ee2fc0f735622bd3d6f1ac8454bf6ef854d5ebca8b7a0" gracePeriod=30 Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.150004 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-api" containerID="cri-o://4a6c2ab72339907e3d69096166d4350c56c8ba65cb2c644c85acbab83584cde2" gracePeriod=30 Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.168029 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.182762 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.185241 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.305939 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.306051 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.982720 4788 generic.go:334] "Generic (PLEG): container finished" podID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerID="4a6c2ab72339907e3d69096166d4350c56c8ba65cb2c644c85acbab83584cde2" exitCode=0 Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.982761 4788 generic.go:334] "Generic (PLEG): container finished" podID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerID="0642ec94368029f4565ee2fc0f735622bd3d6f1ac8454bf6ef854d5ebca8b7a0" exitCode=143 Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.982772 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerDied","Data":"4a6c2ab72339907e3d69096166d4350c56c8ba65cb2c644c85acbab83584cde2"} Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.982852 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerDied","Data":"0642ec94368029f4565ee2fc0f735622bd3d6f1ac8454bf6ef854d5ebca8b7a0"} Dec 11 09:46:41 crc kubenswrapper[4788]: I1211 09:46:41.983353 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" containerID="cri-o://d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" gracePeriod=30 Dec 11 09:46:41 crc kubenswrapper[4788]: E1211 09:46:41.987411 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:41 crc kubenswrapper[4788]: E1211 09:46:41.989523 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:41 crc kubenswrapper[4788]: E1211 09:46:41.991401 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:41 crc kubenswrapper[4788]: E1211 09:46:41.991447 4788 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" Dec 11 09:46:42 crc kubenswrapper[4788]: I1211 09:46:42.616456 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:42 crc kubenswrapper[4788]: I1211 09:46:42.616523 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:42 crc kubenswrapper[4788]: I1211 09:46:42.689590 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:42 crc kubenswrapper[4788]: I1211 09:46:42.991203 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-log" containerID="cri-o://d0c8caedb487596775876cddc1a8d5ffd8713f4c87792d9bbd2e97b65e7e3f7e" gracePeriod=30 Dec 11 09:46:42 crc kubenswrapper[4788]: I1211 09:46:42.991378 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-metadata" containerID="cri-o://858ce7a72b7d7b4371acc8a14e419a76c33548a03b5cb1317078f2c680317668" gracePeriod=30 Dec 11 09:46:43 crc kubenswrapper[4788]: I1211 09:46:43.044478 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:43 crc kubenswrapper[4788]: I1211 09:46:43.100767 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.004412 4788 generic.go:334] "Generic (PLEG): container finished" podID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerID="858ce7a72b7d7b4371acc8a14e419a76c33548a03b5cb1317078f2c680317668" exitCode=0 Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.004453 4788 generic.go:334] "Generic (PLEG): container finished" podID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerID="d0c8caedb487596775876cddc1a8d5ffd8713f4c87792d9bbd2e97b65e7e3f7e" exitCode=143 Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.004467 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerDied","Data":"858ce7a72b7d7b4371acc8a14e419a76c33548a03b5cb1317078f2c680317668"} Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.004515 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerDied","Data":"d0c8caedb487596775876cddc1a8d5ffd8713f4c87792d9bbd2e97b65e7e3f7e"} Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.424595 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.531958 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs\") pod \"3fc1052e-d06c-4464-b1af-015524e53bc6\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.532347 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l789\" (UniqueName: \"kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789\") pod \"3fc1052e-d06c-4464-b1af-015524e53bc6\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.532423 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data\") pod \"3fc1052e-d06c-4464-b1af-015524e53bc6\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.532498 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle\") pod \"3fc1052e-d06c-4464-b1af-015524e53bc6\" (UID: \"3fc1052e-d06c-4464-b1af-015524e53bc6\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.534709 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs" (OuterVolumeSpecName: "logs") pod "3fc1052e-d06c-4464-b1af-015524e53bc6" (UID: "3fc1052e-d06c-4464-b1af-015524e53bc6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.546844 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789" (OuterVolumeSpecName: "kube-api-access-8l789") pod "3fc1052e-d06c-4464-b1af-015524e53bc6" (UID: "3fc1052e-d06c-4464-b1af-015524e53bc6"). InnerVolumeSpecName "kube-api-access-8l789". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.565557 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fc1052e-d06c-4464-b1af-015524e53bc6" (UID: "3fc1052e-d06c-4464-b1af-015524e53bc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.568162 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data" (OuterVolumeSpecName: "config-data") pod "3fc1052e-d06c-4464-b1af-015524e53bc6" (UID: "3fc1052e-d06c-4464-b1af-015524e53bc6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.601825 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.634386 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3fc1052e-d06c-4464-b1af-015524e53bc6-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.634423 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l789\" (UniqueName: \"kubernetes.io/projected/3fc1052e-d06c-4464-b1af-015524e53bc6-kube-api-access-8l789\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.634435 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.634443 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fc1052e-d06c-4464-b1af-015524e53bc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.736199 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs\") pod \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.736382 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle\") pod \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.736684 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs\") pod \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.736720 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs" (OuterVolumeSpecName: "logs") pod "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" (UID: "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.737672 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvgps\" (UniqueName: \"kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps\") pod \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.737720 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data\") pod \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\" (UID: \"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24\") " Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.738555 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.742560 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps" (OuterVolumeSpecName: "kube-api-access-hvgps") pod "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" (UID: "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24"). InnerVolumeSpecName "kube-api-access-hvgps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.765553 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data" (OuterVolumeSpecName: "config-data") pod "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" (UID: "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.767211 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" (UID: "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.793758 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" (UID: "b3ef3ceb-b316-4549-a0b3-f1b2f1013e24"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.841292 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.841366 4788 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.841388 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvgps\" (UniqueName: \"kubernetes.io/projected/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-kube-api-access-hvgps\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:44 crc kubenswrapper[4788]: I1211 09:46:44.841400 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.018349 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3fc1052e-d06c-4464-b1af-015524e53bc6","Type":"ContainerDied","Data":"7410e07fc4bf551432fa1a3edc4d7733f6cd2a41363a77c68556d73ec4db2b37"} Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.018419 4788 scope.go:117] "RemoveContainer" containerID="4a6c2ab72339907e3d69096166d4350c56c8ba65cb2c644c85acbab83584cde2" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.018357 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.025211 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s9tsw" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="registry-server" containerID="cri-o://942cd2b555a84fe99a4e5fbd9c8a9ade9685fe8989cd118a97a14d703192204a" gracePeriod=2 Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.025482 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3ef3ceb-b316-4549-a0b3-f1b2f1013e24","Type":"ContainerDied","Data":"ca22a7c1a07d2e41b31d9f14198bb1b77f09ed07d3527c2ef14bad3f88986cd6"} Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.025487 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.057760 4788 scope.go:117] "RemoveContainer" containerID="0642ec94368029f4565ee2fc0f735622bd3d6f1ac8454bf6ef854d5ebca8b7a0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.069288 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.087643 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.097564 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.108933 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.132052 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: E1211 09:46:45.133092 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e204962-9911-4a7c-b2da-b4a614f548a6" containerName="nova-manage" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133110 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e204962-9911-4a7c-b2da-b4a614f548a6" containerName="nova-manage" Dec 11 09:46:45 crc kubenswrapper[4788]: E1211 09:46:45.133125 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-api" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133131 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-api" Dec 11 09:46:45 crc kubenswrapper[4788]: E1211 09:46:45.133161 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-metadata" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133168 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-metadata" Dec 11 09:46:45 crc kubenswrapper[4788]: E1211 09:46:45.133186 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-log" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133193 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-log" Dec 11 09:46:45 crc kubenswrapper[4788]: E1211 09:46:45.133210 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-log" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133216 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-log" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133585 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e204962-9911-4a7c-b2da-b4a614f548a6" containerName="nova-manage" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133608 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-metadata" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133634 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" 
containerName="nova-api-api" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133647 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" containerName="nova-api-log" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.133669 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" containerName="nova-metadata-log" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.137205 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.141729 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.161777 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.163723 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.166922 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.166932 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.179835 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.189849 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.254814 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.255286 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.255439 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.255460 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbgbv\" (UniqueName: \"kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.357721 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.357800 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.357823 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbgbv\" (UniqueName: \"kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.358023 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.358103 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.358321 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jjj9\" (UniqueName: \"kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.358457 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.358932 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.360123 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.360265 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.365497 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.375410 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.375857 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbgbv\" (UniqueName: \"kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv\") pod \"nova-api-0\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.396670 4788 scope.go:117] "RemoveContainer" containerID="858ce7a72b7d7b4371acc8a14e419a76c33548a03b5cb1317078f2c680317668" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.426954 4788 scope.go:117] "RemoveContainer" containerID="d0c8caedb487596775876cddc1a8d5ffd8713f4c87792d9bbd2e97b65e7e3f7e" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462042 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jjj9\" (UniqueName: \"kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462126 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462147 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462202 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462267 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.462710 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.465443 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.465563 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.467791 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.467933 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.480466 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jjj9\" (UniqueName: \"kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9\") pod \"nova-metadata-0\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.485389 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.938071 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:46:45 crc kubenswrapper[4788]: I1211 09:46:45.995074 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:46:46 crc kubenswrapper[4788]: W1211 09:46:46.002797 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89b62ded_d0fb_4df2_b6a0_1c72d1575865.slice/crio-5f9581b3d65d83c7528573712dda1d15c7dd312b9bf3062ccf21182e8fb6caad WatchSource:0}: Error finding container 5f9581b3d65d83c7528573712dda1d15c7dd312b9bf3062ccf21182e8fb6caad: Status 404 returned error can't find the container with id 5f9581b3d65d83c7528573712dda1d15c7dd312b9bf3062ccf21182e8fb6caad Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.036387 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerStarted","Data":"0985dc5c737a5a9fa7c755f3911dc546accd79ffa36761f636041975fb0541ce"} Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.040839 4788 generic.go:334] "Generic (PLEG): container finished" podID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerID="942cd2b555a84fe99a4e5fbd9c8a9ade9685fe8989cd118a97a14d703192204a" exitCode=0 Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.040907 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerDied","Data":"942cd2b555a84fe99a4e5fbd9c8a9ade9685fe8989cd118a97a14d703192204a"} Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.042521 4788 generic.go:334] "Generic (PLEG): container finished" podID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" 
containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" exitCode=0 Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.042598 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"653a9065-1ac9-40c5-bb56-c3544cd3e9f6","Type":"ContainerDied","Data":"d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098"} Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.043424 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerStarted","Data":"5f9581b3d65d83c7528573712dda1d15c7dd312b9bf3062ccf21182e8fb6caad"} Dec 11 09:46:46 crc kubenswrapper[4788]: E1211 09:46:46.145173 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098 is running failed: container process not found" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:46 crc kubenswrapper[4788]: E1211 09:46:46.145921 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098 is running failed: container process not found" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:46 crc kubenswrapper[4788]: E1211 09:46:46.146544 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098 is running failed: container process not found" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:46:46 crc kubenswrapper[4788]: E1211 09:46:46.146580 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.152013 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.281782 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data\") pod \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.281969 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle\") pod \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.282080 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvg7l\" (UniqueName: \"kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l\") pod \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\" (UID: \"653a9065-1ac9-40c5-bb56-c3544cd3e9f6\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.287519 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l" (OuterVolumeSpecName: "kube-api-access-zvg7l") pod "653a9065-1ac9-40c5-bb56-c3544cd3e9f6" (UID: "653a9065-1ac9-40c5-bb56-c3544cd3e9f6"). InnerVolumeSpecName "kube-api-access-zvg7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.316429 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "653a9065-1ac9-40c5-bb56-c3544cd3e9f6" (UID: "653a9065-1ac9-40c5-bb56-c3544cd3e9f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.318278 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data" (OuterVolumeSpecName: "config-data") pod "653a9065-1ac9-40c5-bb56-c3544cd3e9f6" (UID: "653a9065-1ac9-40c5-bb56-c3544cd3e9f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.385627 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.385665 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.385683 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvg7l\" (UniqueName: \"kubernetes.io/projected/653a9065-1ac9-40c5-bb56-c3544cd3e9f6-kube-api-access-zvg7l\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.554178 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fc1052e-d06c-4464-b1af-015524e53bc6" path="/var/lib/kubelet/pods/3fc1052e-d06c-4464-b1af-015524e53bc6/volumes" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.557454 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ef3ceb-b316-4549-a0b3-f1b2f1013e24" path="/var/lib/kubelet/pods/b3ef3ceb-b316-4549-a0b3-f1b2f1013e24/volumes" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.609569 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.717278 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wjg5\" (UniqueName: \"kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5\") pod \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.717431 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities\") pod \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.717479 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content\") pod \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\" (UID: \"c0610a75-ae4d-4692-b4d5-7dbf70008a94\") " Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.721052 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities" (OuterVolumeSpecName: "utilities") pod "c0610a75-ae4d-4692-b4d5-7dbf70008a94" (UID: "c0610a75-ae4d-4692-b4d5-7dbf70008a94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.731630 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5" (OuterVolumeSpecName: "kube-api-access-6wjg5") pod "c0610a75-ae4d-4692-b4d5-7dbf70008a94" (UID: "c0610a75-ae4d-4692-b4d5-7dbf70008a94"). InnerVolumeSpecName "kube-api-access-6wjg5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.805700 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0610a75-ae4d-4692-b4d5-7dbf70008a94" (UID: "c0610a75-ae4d-4692-b4d5-7dbf70008a94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.820252 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.820315 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0610a75-ae4d-4692-b4d5-7dbf70008a94-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:46 crc kubenswrapper[4788]: I1211 09:46:46.820343 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wjg5\" (UniqueName: \"kubernetes.io/projected/c0610a75-ae4d-4692-b4d5-7dbf70008a94-kube-api-access-6wjg5\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.055343 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s9tsw" event={"ID":"c0610a75-ae4d-4692-b4d5-7dbf70008a94","Type":"ContainerDied","Data":"4dedd454875eaa5a6091f540a653f0c227f5fbc931eeb966a27a5421ac145648"} Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.055394 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s9tsw" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.055409 4788 scope.go:117] "RemoveContainer" containerID="942cd2b555a84fe99a4e5fbd9c8a9ade9685fe8989cd118a97a14d703192204a" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.058281 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"653a9065-1ac9-40c5-bb56-c3544cd3e9f6","Type":"ContainerDied","Data":"66fd348669f4f24fa9ca71c7b4ace97f4a2c2b02e3e0aac161b7266d27266552"} Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.058365 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.061745 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerStarted","Data":"da468444b2e89b4e5c979b6fc23b53adfbc0421fa24be7380f1ef7bee21f1e56"} Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.064417 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerStarted","Data":"f813c73ee7daea1578bfbedcfdc02aa073fc82192e0ef5afd804593c5fb2bb26"} Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.085928 4788 scope.go:117] "RemoveContainer" containerID="fbc83cf311eb5850071217edb3e62ec40ff13f4740e7fb462d152b8bc672110d" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.094982 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.110594 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.124009 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.132737 4788 scope.go:117] "RemoveContainer" containerID="8825ffd27731a6f93be177576e584b31a473128aab27b41ef42940a29c5d1d00" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.135010 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s9tsw"] Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.146544 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:47 crc kubenswrapper[4788]: E1211 09:46:47.147024 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="extract-utilities" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147042 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="extract-utilities" Dec 11 09:46:47 crc kubenswrapper[4788]: E1211 09:46:47.147073 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147081 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" Dec 11 09:46:47 crc kubenswrapper[4788]: E1211 09:46:47.147104 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="extract-content" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147113 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="extract-content" Dec 11 09:46:47 crc kubenswrapper[4788]: E1211 09:46:47.147137 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="registry-server" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147144 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="registry-server" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147347 4788 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" containerName="nova-scheduler-scheduler" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.147372 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" containerName="registry-server" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.148111 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.150512 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.158569 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.175572 4788 scope.go:117] "RemoveContainer" containerID="d8d9f69066100a1078e3863cda71b437f1b820311c41cdb6e353f5665751f098" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.331177 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.331870 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6z5z\" (UniqueName: \"kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.331915 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.433311 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6z5z\" (UniqueName: \"kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.433379 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.433438 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.439532 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.439808 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.452824 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6z5z\" (UniqueName: \"kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z\") pod \"nova-scheduler-0\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " pod="openstack/nova-scheduler-0" Dec 11 09:46:47 crc kubenswrapper[4788]: I1211 09:46:47.518063 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.009209 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:46:48 crc kubenswrapper[4788]: W1211 09:46:48.017072 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68d2836e_dfca_46dd_bfc5_58f6eaf649e5.slice/crio-e9bb4a320d843a954577b3ba14ac1ba5193d99165f88a4b4c47732a45484e3e0 WatchSource:0}: Error finding container e9bb4a320d843a954577b3ba14ac1ba5193d99165f88a4b4c47732a45484e3e0: Status 404 returned error can't find the container with id e9bb4a320d843a954577b3ba14ac1ba5193d99165f88a4b4c47732a45484e3e0 Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.076357 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"68d2836e-dfca-46dd-bfc5-58f6eaf649e5","Type":"ContainerStarted","Data":"e9bb4a320d843a954577b3ba14ac1ba5193d99165f88a4b4c47732a45484e3e0"} Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.081305 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerStarted","Data":"a6a7fbecd1fc2c3a16e347f89e24f0cdbc6ef6f74f2a0a3e8f59dc9e037f2d10"} Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.085061 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerStarted","Data":"7c5e9b027193a3b551de910b6d47fc953c3ad195794311dd2a53ef20e102e9ad"} Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.100859 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.100838146 podStartE2EDuration="3.100838146s" podCreationTimestamp="2025-12-11 09:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:48.100504527 +0000 UTC m=+1538.171284123" watchObservedRunningTime="2025-12-11 09:46:48.100838146 +0000 UTC m=+1538.171617732" Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.130117 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.130098802 podStartE2EDuration="3.130098802s" podCreationTimestamp="2025-12-11 09:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 
09:46:48.120147858 +0000 UTC m=+1538.190927464" watchObservedRunningTime="2025-12-11 09:46:48.130098802 +0000 UTC m=+1538.200878388" Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.506302 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="653a9065-1ac9-40c5-bb56-c3544cd3e9f6" path="/var/lib/kubelet/pods/653a9065-1ac9-40c5-bb56-c3544cd3e9f6/volumes" Dec 11 09:46:48 crc kubenswrapper[4788]: I1211 09:46:48.506842 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0610a75-ae4d-4692-b4d5-7dbf70008a94" path="/var/lib/kubelet/pods/c0610a75-ae4d-4692-b4d5-7dbf70008a94/volumes" Dec 11 09:46:49 crc kubenswrapper[4788]: I1211 09:46:49.095598 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"68d2836e-dfca-46dd-bfc5-58f6eaf649e5","Type":"ContainerStarted","Data":"e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe"} Dec 11 09:46:49 crc kubenswrapper[4788]: I1211 09:46:49.124506 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.124483623 podStartE2EDuration="2.124483623s" podCreationTimestamp="2025-12-11 09:46:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:46:49.123004116 +0000 UTC m=+1539.193783722" watchObservedRunningTime="2025-12-11 09:46:49.124483623 +0000 UTC m=+1539.195263209" Dec 11 09:46:50 crc kubenswrapper[4788]: I1211 09:46:50.485896 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:46:50 crc kubenswrapper[4788]: I1211 09:46:50.486412 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:46:51 crc kubenswrapper[4788]: I1211 09:46:51.369154 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:46:51 crc kubenswrapper[4788]: I1211 09:46:51.369244 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:46:52 crc kubenswrapper[4788]: I1211 09:46:52.518756 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 11 09:46:55 crc kubenswrapper[4788]: I1211 09:46:55.467031 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:46:55 crc kubenswrapper[4788]: I1211 09:46:55.467767 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:46:55 crc kubenswrapper[4788]: I1211 09:46:55.486324 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 11 09:46:55 crc kubenswrapper[4788]: I1211 09:46:55.486385 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 11 09:46:56 crc kubenswrapper[4788]: I1211 09:46:56.549530 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 11 09:46:56 crc kubenswrapper[4788]: I1211 09:46:56.562457 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 11 09:46:56 crc kubenswrapper[4788]: I1211 09:46:56.563186 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:46:56 crc kubenswrapper[4788]: I1211 09:46:56.564216 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:46:57 crc kubenswrapper[4788]: I1211 09:46:57.200421 4788 generic.go:334] "Generic (PLEG): container finished" podID="ef23e667-905e-41dd-a0ac-9739133fbbf9" containerID="3798fe725fc33adf8133e9ae4262c73f9178e094151000712e63f33c72d1add7" exitCode=0 Dec 11 09:46:57 crc kubenswrapper[4788]: I1211 09:46:57.200479 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" event={"ID":"ef23e667-905e-41dd-a0ac-9739133fbbf9","Type":"ContainerDied","Data":"3798fe725fc33adf8133e9ae4262c73f9178e094151000712e63f33c72d1add7"} Dec 11 09:46:57 crc kubenswrapper[4788]: I1211 09:46:57.518645 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 11 09:46:57 crc kubenswrapper[4788]: I1211 09:46:57.550390 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.239895 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.581627 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.777165 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppd9m\" (UniqueName: \"kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m\") pod \"ef23e667-905e-41dd-a0ac-9739133fbbf9\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.777216 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data\") pod \"ef23e667-905e-41dd-a0ac-9739133fbbf9\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.777328 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle\") pod \"ef23e667-905e-41dd-a0ac-9739133fbbf9\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.777399 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts\") pod \"ef23e667-905e-41dd-a0ac-9739133fbbf9\" (UID: \"ef23e667-905e-41dd-a0ac-9739133fbbf9\") " Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.787363 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m" (OuterVolumeSpecName: "kube-api-access-ppd9m") pod "ef23e667-905e-41dd-a0ac-9739133fbbf9" (UID: "ef23e667-905e-41dd-a0ac-9739133fbbf9"). InnerVolumeSpecName "kube-api-access-ppd9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.804981 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts" (OuterVolumeSpecName: "scripts") pod "ef23e667-905e-41dd-a0ac-9739133fbbf9" (UID: "ef23e667-905e-41dd-a0ac-9739133fbbf9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.811464 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef23e667-905e-41dd-a0ac-9739133fbbf9" (UID: "ef23e667-905e-41dd-a0ac-9739133fbbf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.821497 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data" (OuterVolumeSpecName: "config-data") pod "ef23e667-905e-41dd-a0ac-9739133fbbf9" (UID: "ef23e667-905e-41dd-a0ac-9739133fbbf9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.880109 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.880147 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppd9m\" (UniqueName: \"kubernetes.io/projected/ef23e667-905e-41dd-a0ac-9739133fbbf9-kube-api-access-ppd9m\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.880159 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:58 crc kubenswrapper[4788]: I1211 09:46:58.880168 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef23e667-905e-41dd-a0ac-9739133fbbf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.220839 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.220838 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jgd9g" event={"ID":"ef23e667-905e-41dd-a0ac-9739133fbbf9","Type":"ContainerDied","Data":"6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090"} Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.221420 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d1b287b16b02084c05bafc0458dcca44a9b717f39438b55ed970e18c4e5c090" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.310248 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 11 09:46:59 crc kubenswrapper[4788]: E1211 09:46:59.311191 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef23e667-905e-41dd-a0ac-9739133fbbf9" containerName="nova-cell1-conductor-db-sync" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.311215 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef23e667-905e-41dd-a0ac-9739133fbbf9" containerName="nova-cell1-conductor-db-sync" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.311464 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef23e667-905e-41dd-a0ac-9739133fbbf9" containerName="nova-cell1-conductor-db-sync" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.312348 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.317149 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.320187 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.492276 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-588b4\" (UniqueName: \"kubernetes.io/projected/d22ef65f-e312-488f-8607-514c224411e8-kube-api-access-588b4\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.492753 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.492960 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.595784 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-588b4\" (UniqueName: \"kubernetes.io/projected/d22ef65f-e312-488f-8607-514c224411e8-kube-api-access-588b4\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.595939 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.596184 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.602907 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.606898 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d22ef65f-e312-488f-8607-514c224411e8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.614573 4788 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-588b4\" (UniqueName: \"kubernetes.io/projected/d22ef65f-e312-488f-8607-514c224411e8-kube-api-access-588b4\") pod \"nova-cell1-conductor-0\" (UID: \"d22ef65f-e312-488f-8607-514c224411e8\") " pod="openstack/nova-cell1-conductor-0" Dec 11 09:46:59 crc kubenswrapper[4788]: I1211 09:46:59.632430 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 11 09:47:00 crc kubenswrapper[4788]: I1211 09:47:00.061038 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 11 09:47:00 crc kubenswrapper[4788]: I1211 09:47:00.233088 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d22ef65f-e312-488f-8607-514c224411e8","Type":"ContainerStarted","Data":"00c625a334aaf29f3eaa6671ad3a2042f9e1d233166115cbe969c4f560c05962"} Dec 11 09:47:01 crc kubenswrapper[4788]: I1211 09:47:01.246088 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d22ef65f-e312-488f-8607-514c224411e8","Type":"ContainerStarted","Data":"d05c483c608ddfeec4ae461b2dd08b5fef1d1529c9f02aaffbfc070e7a160402"} Dec 11 09:47:01 crc kubenswrapper[4788]: I1211 09:47:01.246693 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 11 09:47:01 crc kubenswrapper[4788]: I1211 09:47:01.265521 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.265503556 podStartE2EDuration="2.265503556s" podCreationTimestamp="2025-12-11 09:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:01.262713985 +0000 UTC m=+1551.333493571" watchObservedRunningTime="2025-12-11 09:47:01.265503556 +0000 UTC m=+1551.336283142" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.257342 4788 generic.go:334] "Generic (PLEG): container finished" podID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" containerID="c93beaa41fbfde835d3ef49837a5f892666833bd81f6ff969d4feb33d54ff069" exitCode=137 Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.257689 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0c8f7cd2-ce22-4f74-a099-721f8f7454bd","Type":"ContainerDied","Data":"c93beaa41fbfde835d3ef49837a5f892666833bd81f6ff969d4feb33d54ff069"} Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.790815 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.870131 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle\") pod \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.870496 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data\") pod \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.870616 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7494\" (UniqueName: \"kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494\") pod \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\" (UID: \"0c8f7cd2-ce22-4f74-a099-721f8f7454bd\") " Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.881446 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494" (OuterVolumeSpecName: "kube-api-access-f7494") pod "0c8f7cd2-ce22-4f74-a099-721f8f7454bd" (UID: "0c8f7cd2-ce22-4f74-a099-721f8f7454bd"). InnerVolumeSpecName "kube-api-access-f7494". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.902994 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data" (OuterVolumeSpecName: "config-data") pod "0c8f7cd2-ce22-4f74-a099-721f8f7454bd" (UID: "0c8f7cd2-ce22-4f74-a099-721f8f7454bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.905014 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c8f7cd2-ce22-4f74-a099-721f8f7454bd" (UID: "0c8f7cd2-ce22-4f74-a099-721f8f7454bd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.975218 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.975281 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7494\" (UniqueName: \"kubernetes.io/projected/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-kube-api-access-f7494\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:02 crc kubenswrapper[4788]: I1211 09:47:02.975297 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c8f7cd2-ce22-4f74-a099-721f8f7454bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.270378 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0c8f7cd2-ce22-4f74-a099-721f8f7454bd","Type":"ContainerDied","Data":"ab6e081769c0fad972337f7d283c968d95a01d45334103ab5e244de9eae38f33"} Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.270447 4788 scope.go:117] "RemoveContainer" containerID="c93beaa41fbfde835d3ef49837a5f892666833bd81f6ff969d4feb33d54ff069" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.271682 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.311367 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.320037 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.342053 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:47:03 crc kubenswrapper[4788]: E1211 09:47:03.342636 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" containerName="nova-cell1-novncproxy-novncproxy" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.342661 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" containerName="nova-cell1-novncproxy-novncproxy" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.342929 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" containerName="nova-cell1-novncproxy-novncproxy" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.343684 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.348243 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.352532 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.353471 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.363841 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.483717 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk8sd\" (UniqueName: \"kubernetes.io/projected/a1fd7161-dba8-481a-946b-07baf45ffcdf-kube-api-access-pk8sd\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.483795 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.483835 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.484002 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.484073 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.586815 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk8sd\" (UniqueName: \"kubernetes.io/projected/a1fd7161-dba8-481a-946b-07baf45ffcdf-kube-api-access-pk8sd\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.586887 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.586927 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.587000 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.587028 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.592248 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.594084 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.598831 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.602034 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1fd7161-dba8-481a-946b-07baf45ffcdf-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.606277 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk8sd\" (UniqueName: \"kubernetes.io/projected/a1fd7161-dba8-481a-946b-07baf45ffcdf-kube-api-access-pk8sd\") pod \"nova-cell1-novncproxy-0\" (UID: \"a1fd7161-dba8-481a-946b-07baf45ffcdf\") " pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:03 crc kubenswrapper[4788]: I1211 09:47:03.667537 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:04 crc kubenswrapper[4788]: I1211 09:47:04.096422 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 11 09:47:04 crc kubenswrapper[4788]: I1211 09:47:04.285747 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a1fd7161-dba8-481a-946b-07baf45ffcdf","Type":"ContainerStarted","Data":"5e79f5c34c2e462a4a86666a4d1641b542cc134ae3be9eedf718a50e05b71d0e"} Dec 11 09:47:04 crc kubenswrapper[4788]: I1211 09:47:04.507562 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c8f7cd2-ce22-4f74-a099-721f8f7454bd" path="/var/lib/kubelet/pods/0c8f7cd2-ce22-4f74-a099-721f8f7454bd/volumes" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.296392 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a1fd7161-dba8-481a-946b-07baf45ffcdf","Type":"ContainerStarted","Data":"8b36c4b136914775d9947c3641d2991e80d8e2298b2435d87da2b529f0c45fbd"} Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.321963 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.321941837 podStartE2EDuration="2.321941837s" podCreationTimestamp="2025-12-11 09:47:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:05.316661942 +0000 UTC m=+1555.387441548" watchObservedRunningTime="2025-12-11 09:47:05.321941837 +0000 UTC m=+1555.392721423" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.469787 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.470216 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.473713 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.474872 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.491466 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.492476 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 11 09:47:05 crc kubenswrapper[4788]: I1211 09:47:05.497437 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.306426 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.310613 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.312309 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.486957 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.489761 4788 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.521622 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660190 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660302 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660363 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjlsk\" (UniqueName: \"kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660388 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660432 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.660516 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786186 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786308 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786372 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjlsk\" (UniqueName: \"kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786395 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.786457 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.787674 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.818296 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.818504 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.818707 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.824511 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:06 crc kubenswrapper[4788]: I1211 09:47:06.842781 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjlsk\" (UniqueName: 
\"kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk\") pod \"dnsmasq-dns-cd5cbd7b9-872c7\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:07 crc kubenswrapper[4788]: I1211 09:47:07.129634 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:07 crc kubenswrapper[4788]: I1211 09:47:07.709135 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:47:08 crc kubenswrapper[4788]: E1211 09:47:08.331961 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod208b9146_8d17_4510_b547_b4ca9d7d6feb.slice/crio-6faf8b5a46b4d36651ee5ba5f0bbe7ac0ff11f4c94c4ff4a2b39ac148ef8c095.scope\": RecentStats: unable to find data in memory cache]" Dec 11 09:47:08 crc kubenswrapper[4788]: I1211 09:47:08.339699 4788 generic.go:334] "Generic (PLEG): container finished" podID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerID="6faf8b5a46b4d36651ee5ba5f0bbe7ac0ff11f4c94c4ff4a2b39ac148ef8c095" exitCode=0 Dec 11 09:47:08 crc kubenswrapper[4788]: I1211 09:47:08.340421 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" event={"ID":"208b9146-8d17-4510-b547-b4ca9d7d6feb","Type":"ContainerDied","Data":"6faf8b5a46b4d36651ee5ba5f0bbe7ac0ff11f4c94c4ff4a2b39ac148ef8c095"} Dec 11 09:47:08 crc kubenswrapper[4788]: I1211 09:47:08.340490 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" event={"ID":"208b9146-8d17-4510-b547-b4ca9d7d6feb","Type":"ContainerStarted","Data":"55ffb176a98e1b8baf3d4ebb9949f27909e0db019e8252bcae73330ee339803e"} Dec 11 09:47:08 crc kubenswrapper[4788]: I1211 09:47:08.687846 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:09 crc kubenswrapper[4788]: I1211 09:47:09.051339 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:09 crc kubenswrapper[4788]: I1211 09:47:09.348464 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-log" containerID="cri-o://f813c73ee7daea1578bfbedcfdc02aa073fc82192e0ef5afd804593c5fb2bb26" gracePeriod=30 Dec 11 09:47:09 crc kubenswrapper[4788]: I1211 09:47:09.348527 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-api" containerID="cri-o://7c5e9b027193a3b551de910b6d47fc953c3ad195794311dd2a53ef20e102e9ad" gracePeriod=30 Dec 11 09:47:09 crc kubenswrapper[4788]: I1211 09:47:09.665945 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 11 09:47:10 crc kubenswrapper[4788]: I1211 09:47:10.364218 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" event={"ID":"208b9146-8d17-4510-b547-b4ca9d7d6feb","Type":"ContainerStarted","Data":"783f9e601234bc83351dc3a3b2196f6f94db5ffa6aa7ebef4e8703035e95e1de"} Dec 11 09:47:10 crc kubenswrapper[4788]: I1211 09:47:10.364414 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:10 crc 
kubenswrapper[4788]: I1211 09:47:10.366785 4788 generic.go:334] "Generic (PLEG): container finished" podID="a972367d-1100-4620-9a99-929961dd63b8" containerID="f813c73ee7daea1578bfbedcfdc02aa073fc82192e0ef5afd804593c5fb2bb26" exitCode=143 Dec 11 09:47:10 crc kubenswrapper[4788]: I1211 09:47:10.366890 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerDied","Data":"f813c73ee7daea1578bfbedcfdc02aa073fc82192e0ef5afd804593c5fb2bb26"} Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.022092 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" podStartSLOduration=5.022069925 podStartE2EDuration="5.022069925s" podCreationTimestamp="2025-12-11 09:47:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:10.388791139 +0000 UTC m=+1560.459570715" watchObservedRunningTime="2025-12-11 09:47:11.022069925 +0000 UTC m=+1561.092849531" Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.028245 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.028571 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-central-agent" containerID="cri-o://383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91" gracePeriod=30 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.028648 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-notification-agent" containerID="cri-o://d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f" gracePeriod=30 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.028726 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="proxy-httpd" containerID="cri-o://c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df" gracePeriod=30 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.028641 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="sg-core" containerID="cri-o://501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4" gracePeriod=30 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.379549 4788 generic.go:334] "Generic (PLEG): container finished" podID="3a795382-940a-45a1-bb02-8b88317194a6" containerID="c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df" exitCode=0 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.379596 4788 generic.go:334] "Generic (PLEG): container finished" podID="3a795382-940a-45a1-bb02-8b88317194a6" containerID="501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4" exitCode=2 Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.379620 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerDied","Data":"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df"} Dec 11 09:47:11 crc kubenswrapper[4788]: I1211 09:47:11.379663 4788 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerDied","Data":"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4"} Dec 11 09:47:12 crc kubenswrapper[4788]: I1211 09:47:12.390849 4788 generic.go:334] "Generic (PLEG): container finished" podID="3a795382-940a-45a1-bb02-8b88317194a6" containerID="383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91" exitCode=0 Dec 11 09:47:12 crc kubenswrapper[4788]: I1211 09:47:12.390889 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerDied","Data":"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91"} Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.406927 4788 generic.go:334] "Generic (PLEG): container finished" podID="a972367d-1100-4620-9a99-929961dd63b8" containerID="7c5e9b027193a3b551de910b6d47fc953c3ad195794311dd2a53ef20e102e9ad" exitCode=0 Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.407222 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerDied","Data":"7c5e9b027193a3b551de910b6d47fc953c3ad195794311dd2a53ef20e102e9ad"} Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.607488 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.670991 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.696049 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle\") pod \"a972367d-1100-4620-9a99-929961dd63b8\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.696122 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbgbv\" (UniqueName: \"kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv\") pod \"a972367d-1100-4620-9a99-929961dd63b8\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.696835 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs\") pod \"a972367d-1100-4620-9a99-929961dd63b8\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.696943 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data\") pod \"a972367d-1100-4620-9a99-929961dd63b8\" (UID: \"a972367d-1100-4620-9a99-929961dd63b8\") " Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.697714 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs" (OuterVolumeSpecName: "logs") pod "a972367d-1100-4620-9a99-929961dd63b8" (UID: "a972367d-1100-4620-9a99-929961dd63b8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.716973 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.721345 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv" (OuterVolumeSpecName: "kube-api-access-nbgbv") pod "a972367d-1100-4620-9a99-929961dd63b8" (UID: "a972367d-1100-4620-9a99-929961dd63b8"). InnerVolumeSpecName "kube-api-access-nbgbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.742068 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a972367d-1100-4620-9a99-929961dd63b8" (UID: "a972367d-1100-4620-9a99-929961dd63b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.754384 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data" (OuterVolumeSpecName: "config-data") pod "a972367d-1100-4620-9a99-929961dd63b8" (UID: "a972367d-1100-4620-9a99-929961dd63b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.799133 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.799166 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbgbv\" (UniqueName: \"kubernetes.io/projected/a972367d-1100-4620-9a99-929961dd63b8-kube-api-access-nbgbv\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.799178 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a972367d-1100-4620-9a99-929961dd63b8-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:13 crc kubenswrapper[4788]: I1211 09:47:13.799187 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a972367d-1100-4620-9a99-929961dd63b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.420082 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a972367d-1100-4620-9a99-929961dd63b8","Type":"ContainerDied","Data":"0985dc5c737a5a9fa7c755f3911dc546accd79ffa36761f636041975fb0541ce"} Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.420557 4788 scope.go:117] "RemoveContainer" containerID="7c5e9b027193a3b551de910b6d47fc953c3ad195794311dd2a53ef20e102e9ad" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.420147 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.443453 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.452943 4788 scope.go:117] "RemoveContainer" containerID="f813c73ee7daea1578bfbedcfdc02aa073fc82192e0ef5afd804593c5fb2bb26" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.477572 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.493874 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.509860 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a972367d-1100-4620-9a99-929961dd63b8" path="/var/lib/kubelet/pods/a972367d-1100-4620-9a99-929961dd63b8/volumes" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.543990 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:14 crc kubenswrapper[4788]: E1211 09:47:14.544549 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-log" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.544569 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-log" Dec 11 09:47:14 crc kubenswrapper[4788]: E1211 09:47:14.544596 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-api" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.544604 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-api" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.544849 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-api" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.544893 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a972367d-1100-4620-9a99-929961dd63b8" containerName="nova-api-log" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.546163 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.549023 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.549280 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.549464 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.554804 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.617696 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.617762 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.617797 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.617843 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh8hw\" (UniqueName: \"kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.617975 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.618071 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.681722 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-4664m"] Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.683190 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.685570 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.685967 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.693620 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4664m"] Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.719889 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720218 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720390 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720540 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh8hw\" (UniqueName: \"kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720661 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720804 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjknz\" (UniqueName: \"kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.720955 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.721072 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs\") 
pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.721697 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.721857 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.722894 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.725529 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.725543 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.727336 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.733624 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.737551 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh8hw\" (UniqueName: \"kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw\") pod \"nova-api-0\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " pod="openstack/nova-api-0" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.823326 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.823448 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4664m\" (UID: 
\"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.823479 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjknz\" (UniqueName: \"kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.823517 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.828095 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.828780 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.829424 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.845721 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjknz\" (UniqueName: \"kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz\") pod \"nova-cell1-cell-mapping-4664m\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:14 crc kubenswrapper[4788]: I1211 09:47:14.876821 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:15 crc kubenswrapper[4788]: I1211 09:47:15.001396 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:15 crc kubenswrapper[4788]: I1211 09:47:15.355121 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:15 crc kubenswrapper[4788]: W1211 09:47:15.361550 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63b89091_40fe_446f_9f84_b6a2b0e98bad.slice/crio-4cae3b52bbed7518224d32a0a59ee69d411eb0b9f3608b30c09283e31af8391a WatchSource:0}: Error finding container 4cae3b52bbed7518224d32a0a59ee69d411eb0b9f3608b30c09283e31af8391a: Status 404 returned error can't find the container with id 4cae3b52bbed7518224d32a0a59ee69d411eb0b9f3608b30c09283e31af8391a Dec 11 09:47:15 crc kubenswrapper[4788]: I1211 09:47:15.434586 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerStarted","Data":"4cae3b52bbed7518224d32a0a59ee69d411eb0b9f3608b30c09283e31af8391a"} Dec 11 09:47:15 crc kubenswrapper[4788]: I1211 09:47:15.476933 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4664m"] Dec 11 09:47:15 crc kubenswrapper[4788]: W1211 09:47:15.487896 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1098e947_9b9a_4eeb_8ecd_1c0e1253adcc.slice/crio-1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918 WatchSource:0}: Error finding container 1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918: Status 404 returned error can't find the container with id 1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918 Dec 11 09:47:16 crc kubenswrapper[4788]: I1211 09:47:16.449100 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4664m" event={"ID":"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc","Type":"ContainerStarted","Data":"043f5585e5f2c06c04bcd6ac5c529b2f95bc9545bc4b9683ceb450ec444d1dea"} Dec 11 09:47:16 crc kubenswrapper[4788]: I1211 09:47:16.449350 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4664m" event={"ID":"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc","Type":"ContainerStarted","Data":"1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918"} Dec 11 09:47:16 crc kubenswrapper[4788]: I1211 09:47:16.453255 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerStarted","Data":"b9a6698f8582f18125b1130c02a41edb0e0621af50b40bf019c02f468558ddcd"} Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.131528 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.206563 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.206957 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="dnsmasq-dns" containerID="cri-o://173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3" gracePeriod=10 Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.410390 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.465983 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerStarted","Data":"11316b361358252fa7d5cd100d8b9918617959201f5220da5b45190788e88d7c"} Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.474159 4788 generic.go:334] "Generic (PLEG): container finished" podID="3a795382-940a-45a1-bb02-8b88317194a6" containerID="d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f" exitCode=0 Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.474329 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.474374 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerDied","Data":"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f"} Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.474509 4788 scope.go:117] "RemoveContainer" containerID="c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.474747 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3a795382-940a-45a1-bb02-8b88317194a6","Type":"ContainerDied","Data":"bfdc03adbdd7e75495b3a034b4002f4ce8bc868481ae506af5aa08be3378d7fb"} Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.497378 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-4664m" podStartSLOduration=3.497345286 podStartE2EDuration="3.497345286s" podCreationTimestamp="2025-12-11 09:47:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:17.491105956 +0000 UTC m=+1567.561885552" watchObservedRunningTime="2025-12-11 09:47:17.497345286 +0000 UTC m=+1567.568124862" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502556 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59q4h\" (UniqueName: \"kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502668 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502770 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502794 4788 scope.go:117] "RemoveContainer" containerID="501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502817 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502841 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502874 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.502967 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.503013 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml\") pod \"3a795382-940a-45a1-bb02-8b88317194a6\" (UID: \"3a795382-940a-45a1-bb02-8b88317194a6\") " Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.503623 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.503708 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.503917 4788 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.503941 4788 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3a795382-940a-45a1-bb02-8b88317194a6-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.513992 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts" (OuterVolumeSpecName: "scripts") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.525579 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h" (OuterVolumeSpecName: "kube-api-access-59q4h") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "kube-api-access-59q4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.563491 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.606342 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.606391 4788 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.606406 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59q4h\" (UniqueName: \"kubernetes.io/projected/3a795382-940a-45a1-bb02-8b88317194a6-kube-api-access-59q4h\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.620486 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.650534 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.654785 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data" (OuterVolumeSpecName: "config-data") pod "3a795382-940a-45a1-bb02-8b88317194a6" (UID: "3a795382-940a-45a1-bb02-8b88317194a6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.707487 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.707518 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.707528 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a795382-940a-45a1-bb02-8b88317194a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.799390 4788 scope.go:117] "RemoveContainer" containerID="d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.817158 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.835639 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.848733 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.849266 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-notification-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849284 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-notification-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.849304 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-central-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849311 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-central-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.849337 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="sg-core" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849344 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="sg-core" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.849354 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="proxy-httpd" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849359 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="proxy-httpd" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849556 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-central-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849585 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="proxy-httpd" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 
09:47:17.849595 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="ceilometer-notification-agent" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849605 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a795382-940a-45a1-bb02-8b88317194a6" containerName="sg-core" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.849879 4788 scope.go:117] "RemoveContainer" containerID="383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.851339 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.853741 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.854367 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.854416 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.861747 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.897889 4788 scope.go:117] "RemoveContainer" containerID="c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.898692 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df\": container with ID starting with c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df not found: ID does not exist" containerID="c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.898735 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df"} err="failed to get container status \"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df\": rpc error: code = NotFound desc = could not find container \"c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df\": container with ID starting with c70d581a0fb9853b9d8791f31fa44961347fdf9e06b4cdc38e953127e4dbe7df not found: ID does not exist" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.898768 4788 scope.go:117] "RemoveContainer" containerID="501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.899081 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4\": container with ID starting with 501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4 not found: ID does not exist" containerID="501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.899113 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4"} err="failed to get container status 
\"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4\": rpc error: code = NotFound desc = could not find container \"501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4\": container with ID starting with 501e7dad75b3d44c0b12b09deca556353d77df809e3430346d614ad6c8c919f4 not found: ID does not exist" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.899138 4788 scope.go:117] "RemoveContainer" containerID="d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.899549 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f\": container with ID starting with d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f not found: ID does not exist" containerID="d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.899572 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f"} err="failed to get container status \"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f\": rpc error: code = NotFound desc = could not find container \"d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f\": container with ID starting with d8ea7bf7c460086314f1872dbec0177bd072e22d1f8b7ca180afa3d237ab2e9f not found: ID does not exist" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.899587 4788 scope.go:117] "RemoveContainer" containerID="383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91" Dec 11 09:47:17 crc kubenswrapper[4788]: E1211 09:47:17.899845 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91\": container with ID starting with 383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91 not found: ID does not exist" containerID="383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.899881 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91"} err="failed to get container status \"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91\": rpc error: code = NotFound desc = could not find container \"383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91\": container with ID starting with 383c1f80bb08870ba8da16ac81eccb61ea8daa53c29ee55504d0c930404e2e91 not found: ID does not exist" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910661 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-run-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910713 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 
09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910745 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910869 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-log-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910891 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-scripts\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.910986 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.911215 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-config-data\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:17 crc kubenswrapper[4788]: I1211 09:47:17.911407 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf57q\" (UniqueName: \"kubernetes.io/projected/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-kube-api-access-hf57q\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013274 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013334 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-config-data\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013366 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf57q\" (UniqueName: \"kubernetes.io/projected/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-kube-api-access-hf57q\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013434 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-run-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013457 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013478 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013516 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-log-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.013534 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-scripts\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.014253 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-log-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.014281 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-run-httpd\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.018725 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.019047 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.019808 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.020367 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-config-data\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.021221 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-scripts\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.038574 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf57q\" (UniqueName: \"kubernetes.io/projected/7e44e4da-495b-45b4-bfef-27ccb5dd3a51-kube-api-access-hf57q\") pod \"ceilometer-0\" (UID: \"7e44e4da-495b-45b4-bfef-27ccb5dd3a51\") " pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.181377 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.414448 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.487369 4788 generic.go:334] "Generic (PLEG): container finished" podID="107c813a-c386-4ffe-8206-ff0badb52f03" containerID="173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3" exitCode=0 Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.488692 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.488834 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" event={"ID":"107c813a-c386-4ffe-8206-ff0badb52f03","Type":"ContainerDied","Data":"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3"} Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.488996 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-xvscx" event={"ID":"107c813a-c386-4ffe-8206-ff0badb52f03","Type":"ContainerDied","Data":"3a3837d8d987429025086faabad27ffac220fe404e230c3c6e81e6c1eb689761"} Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.489113 4788 scope.go:117] "RemoveContainer" containerID="173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.516333 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.516313145 podStartE2EDuration="4.516313145s" podCreationTimestamp="2025-12-11 09:47:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:18.507730616 +0000 UTC m=+1568.578510222" watchObservedRunningTime="2025-12-11 09:47:18.516313145 +0000 UTC m=+1568.587092741" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.526093 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a795382-940a-45a1-bb02-8b88317194a6" path="/var/lib/kubelet/pods/3a795382-940a-45a1-bb02-8b88317194a6/volumes" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527676 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb\") pod 
\"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527750 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0\") pod \"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527800 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config\") pod \"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527815 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpw8h\" (UniqueName: \"kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h\") pod \"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527875 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb\") pod \"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.527900 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc\") pod \"107c813a-c386-4ffe-8206-ff0badb52f03\" (UID: \"107c813a-c386-4ffe-8206-ff0badb52f03\") " Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.535585 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h" (OuterVolumeSpecName: "kube-api-access-wpw8h") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "kube-api-access-wpw8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.536003 4788 scope.go:117] "RemoveContainer" containerID="0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.591902 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.597116 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.602365 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.605876 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.607253 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config" (OuterVolumeSpecName: "config") pod "107c813a-c386-4ffe-8206-ff0badb52f03" (UID: "107c813a-c386-4ffe-8206-ff0badb52f03"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.631977 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.632014 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpw8h\" (UniqueName: \"kubernetes.io/projected/107c813a-c386-4ffe-8206-ff0badb52f03-kube-api-access-wpw8h\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.632028 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.632037 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.632045 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.632059 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/107c813a-c386-4ffe-8206-ff0badb52f03-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.711671 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.824288 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:47:18 crc kubenswrapper[4788]: I1211 09:47:18.833135 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-xvscx"] Dec 11 09:47:19 crc kubenswrapper[4788]: I1211 09:47:19.053147 4788 scope.go:117] "RemoveContainer" 
containerID="173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3" Dec 11 09:47:19 crc kubenswrapper[4788]: E1211 09:47:19.053540 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3\": container with ID starting with 173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3 not found: ID does not exist" containerID="173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3" Dec 11 09:47:19 crc kubenswrapper[4788]: I1211 09:47:19.053585 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3"} err="failed to get container status \"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3\": rpc error: code = NotFound desc = could not find container \"173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3\": container with ID starting with 173b8b9e078a597c1f967a558868077bbdb02a257652222d40e52110880289b3 not found: ID does not exist" Dec 11 09:47:19 crc kubenswrapper[4788]: I1211 09:47:19.053613 4788 scope.go:117] "RemoveContainer" containerID="0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08" Dec 11 09:47:19 crc kubenswrapper[4788]: E1211 09:47:19.053830 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08\": container with ID starting with 0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08 not found: ID does not exist" containerID="0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08" Dec 11 09:47:19 crc kubenswrapper[4788]: I1211 09:47:19.053867 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08"} err="failed to get container status \"0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08\": rpc error: code = NotFound desc = could not find container \"0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08\": container with ID starting with 0bad877ff5952d9824659098ab7e8c88a118b9a49b61a619498ae46b43938f08 not found: ID does not exist" Dec 11 09:47:19 crc kubenswrapper[4788]: I1211 09:47:19.499136 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e44e4da-495b-45b4-bfef-27ccb5dd3a51","Type":"ContainerStarted","Data":"e4ff523c4c95eb17042fbf7d8089f778cf390cd23766951f90f3d9e899ffe982"} Dec 11 09:47:20 crc kubenswrapper[4788]: I1211 09:47:20.505855 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" path="/var/lib/kubelet/pods/107c813a-c386-4ffe-8206-ff0badb52f03/volumes" Dec 11 09:47:21 crc kubenswrapper[4788]: I1211 09:47:21.368957 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:47:21 crc kubenswrapper[4788]: I1211 09:47:21.369031 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:47:21 crc kubenswrapper[4788]: I1211 09:47:21.518367 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e44e4da-495b-45b4-bfef-27ccb5dd3a51","Type":"ContainerStarted","Data":"c83e3b7c2b83d6f42eacb4339223b2a449b8fade90b0262fe850dc3905a908c5"} Dec 11 09:47:24 crc kubenswrapper[4788]: I1211 09:47:24.551118 4788 generic.go:334] "Generic (PLEG): container finished" podID="1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" containerID="043f5585e5f2c06c04bcd6ac5c529b2f95bc9545bc4b9683ceb450ec444d1dea" exitCode=0 Dec 11 09:47:24 crc kubenswrapper[4788]: I1211 09:47:24.551267 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4664m" event={"ID":"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc","Type":"ContainerDied","Data":"043f5585e5f2c06c04bcd6ac5c529b2f95bc9545bc4b9683ceb450ec444d1dea"} Dec 11 09:47:24 crc kubenswrapper[4788]: I1211 09:47:24.877178 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:47:24 crc kubenswrapper[4788]: I1211 09:47:24.877625 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:47:25 crc kubenswrapper[4788]: I1211 09:47:25.563488 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e44e4da-495b-45b4-bfef-27ccb5dd3a51","Type":"ContainerStarted","Data":"00fd56a31e7cc81bd7420f47ba8ee89b1a7db82e8b8e4797fa441953ce76c38e"} Dec 11 09:47:25 crc kubenswrapper[4788]: I1211 09:47:25.891743 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:25 crc kubenswrapper[4788]: I1211 09:47:25.891760 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.094730 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.188564 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjknz\" (UniqueName: \"kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz\") pod \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.188628 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle\") pod \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.188664 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data\") pod \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.188825 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts\") pod \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\" (UID: \"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc\") " Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.193829 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz" (OuterVolumeSpecName: "kube-api-access-cjknz") pod "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" (UID: "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc"). InnerVolumeSpecName "kube-api-access-cjknz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.194640 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts" (OuterVolumeSpecName: "scripts") pod "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" (UID: "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.219572 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data" (OuterVolumeSpecName: "config-data") pod "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" (UID: "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.225478 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" (UID: "1098e947-9b9a-4eeb-8ecd-1c0e1253adcc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.291211 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjknz\" (UniqueName: \"kubernetes.io/projected/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-kube-api-access-cjknz\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.291349 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.291363 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.291373 4788 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.575434 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4664m" event={"ID":"1098e947-9b9a-4eeb-8ecd-1c0e1253adcc","Type":"ContainerDied","Data":"1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918"} Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.575513 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d2f4784d2c00644bb22ba0963bd1e7cb6e18318b98dcce80b5d91da67c31918" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.575455 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4664m" Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.578269 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e44e4da-495b-45b4-bfef-27ccb5dd3a51","Type":"ContainerStarted","Data":"0fcea2d783ecaa930cb805878f4e6273e645eca0dff8ed2f5544ecb6f0298ab9"} Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.768434 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.768805 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-log" containerID="cri-o://b9a6698f8582f18125b1130c02a41edb0e0621af50b40bf019c02f468558ddcd" gracePeriod=30 Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.768909 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-api" containerID="cri-o://11316b361358252fa7d5cd100d8b9918617959201f5220da5b45190788e88d7c" gracePeriod=30 Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.778377 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.778648 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" containerID="cri-o://e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" gracePeriod=30 Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.881565 4788 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.882272 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" containerID="cri-o://da468444b2e89b4e5c979b6fc23b53adfbc0421fa24be7380f1ef7bee21f1e56" gracePeriod=30 Dec 11 09:47:26 crc kubenswrapper[4788]: I1211 09:47:26.882908 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" containerID="cri-o://a6a7fbecd1fc2c3a16e347f89e24f0cdbc6ef6f74f2a0a3e8f59dc9e037f2d10" gracePeriod=30 Dec 11 09:47:27 crc kubenswrapper[4788]: E1211 09:47:27.524307 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:27 crc kubenswrapper[4788]: E1211 09:47:27.526058 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:27 crc kubenswrapper[4788]: E1211 09:47:27.527303 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:27 crc kubenswrapper[4788]: E1211 09:47:27.527347 4788 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" Dec 11 09:47:27 crc kubenswrapper[4788]: I1211 09:47:27.589704 4788 generic.go:334] "Generic (PLEG): container finished" podID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerID="da468444b2e89b4e5c979b6fc23b53adfbc0421fa24be7380f1ef7bee21f1e56" exitCode=143 Dec 11 09:47:27 crc kubenswrapper[4788]: I1211 09:47:27.589775 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerDied","Data":"da468444b2e89b4e5c979b6fc23b53adfbc0421fa24be7380f1ef7bee21f1e56"} Dec 11 09:47:27 crc kubenswrapper[4788]: I1211 09:47:27.592305 4788 generic.go:334] "Generic (PLEG): container finished" podID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerID="b9a6698f8582f18125b1130c02a41edb0e0621af50b40bf019c02f468558ddcd" exitCode=143 Dec 11 09:47:27 crc kubenswrapper[4788]: I1211 09:47:27.592337 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerDied","Data":"b9a6698f8582f18125b1130c02a41edb0e0621af50b40bf019c02f468558ddcd"} Dec 11 09:47:30 crc kubenswrapper[4788]: I1211 09:47:30.486841 4788 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": dial tcp 10.217.0.197:8775: connect: connection refused" Dec 11 09:47:30 crc kubenswrapper[4788]: I1211 09:47:30.486839 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": dial tcp 10.217.0.197:8775: connect: connection refused" Dec 11 09:47:30 crc kubenswrapper[4788]: I1211 09:47:30.621125 4788 generic.go:334] "Generic (PLEG): container finished" podID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerID="a6a7fbecd1fc2c3a16e347f89e24f0cdbc6ef6f74f2a0a3e8f59dc9e037f2d10" exitCode=0 Dec 11 09:47:30 crc kubenswrapper[4788]: I1211 09:47:30.621167 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerDied","Data":"a6a7fbecd1fc2c3a16e347f89e24f0cdbc6ef6f74f2a0a3e8f59dc9e037f2d10"} Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.797000 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.898629 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle\") pod \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.898849 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data\") pod \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.898890 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jjj9\" (UniqueName: \"kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9\") pod \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.898949 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs\") pod \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.899109 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs\") pod \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\" (UID: \"89b62ded-d0fb-4df2-b6a0-1c72d1575865\") " Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.900537 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs" (OuterVolumeSpecName: "logs") pod "89b62ded-d0fb-4df2-b6a0-1c72d1575865" (UID: "89b62ded-d0fb-4df2-b6a0-1c72d1575865"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.908551 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9" (OuterVolumeSpecName: "kube-api-access-5jjj9") pod "89b62ded-d0fb-4df2-b6a0-1c72d1575865" (UID: "89b62ded-d0fb-4df2-b6a0-1c72d1575865"). InnerVolumeSpecName "kube-api-access-5jjj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.946345 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89b62ded-d0fb-4df2-b6a0-1c72d1575865" (UID: "89b62ded-d0fb-4df2-b6a0-1c72d1575865"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:31 crc kubenswrapper[4788]: I1211 09:47:31.959106 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data" (OuterVolumeSpecName: "config-data") pod "89b62ded-d0fb-4df2-b6a0-1c72d1575865" (UID: "89b62ded-d0fb-4df2-b6a0-1c72d1575865"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.001838 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.001890 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jjj9\" (UniqueName: \"kubernetes.io/projected/89b62ded-d0fb-4df2-b6a0-1c72d1575865-kube-api-access-5jjj9\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.001901 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/89b62ded-d0fb-4df2-b6a0-1c72d1575865-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.001910 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.030406 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "89b62ded-d0fb-4df2-b6a0-1c72d1575865" (UID: "89b62ded-d0fb-4df2-b6a0-1c72d1575865"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.103952 4788 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/89b62ded-d0fb-4df2-b6a0-1c72d1575865-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.521739 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe is running failed: container process not found" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.522642 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe is running failed: container process not found" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.526382 4788 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe is running failed: container process not found" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.526461 4788 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.643697 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e44e4da-495b-45b4-bfef-27ccb5dd3a51","Type":"ContainerStarted","Data":"55dd271c78931f243cb8fe7f6f1ba4d90143b822ba545f7b8d3b2fb5257cc14c"} Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.646768 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"89b62ded-d0fb-4df2-b6a0-1c72d1575865","Type":"ContainerDied","Data":"5f9581b3d65d83c7528573712dda1d15c7dd312b9bf3062ccf21182e8fb6caad"} Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.646816 4788 scope.go:117] "RemoveContainer" containerID="a6a7fbecd1fc2c3a16e347f89e24f0cdbc6ef6f74f2a0a3e8f59dc9e037f2d10" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.646944 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.653778 4788 generic.go:334] "Generic (PLEG): container finished" podID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerID="11316b361358252fa7d5cd100d8b9918617959201f5220da5b45190788e88d7c" exitCode=0 Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.653816 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerDied","Data":"11316b361358252fa7d5cd100d8b9918617959201f5220da5b45190788e88d7c"} Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.682285 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.712402 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.729923 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.730531 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" containerName="nova-manage" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730555 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" containerName="nova-manage" Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.730577 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730588 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.730604 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="init" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730612 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="init" Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.730650 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="dnsmasq-dns" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730660 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="dnsmasq-dns" Dec 11 09:47:32 crc kubenswrapper[4788]: E1211 09:47:32.730682 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730691 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730961 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-log" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.730999 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" containerName="nova-metadata-metadata" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.731019 4788 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" containerName="nova-manage" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.731038 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="107c813a-c386-4ffe-8206-ff0badb52f03" containerName="dnsmasq-dns" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.732430 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.735523 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.735989 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.745546 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.850053 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndp5j\" (UniqueName: \"kubernetes.io/projected/d255cc24-14bd-4114-938b-c91acbe5c5d2-kube-api-access-ndp5j\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.850213 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.850426 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d255cc24-14bd-4114-938b-c91acbe5c5d2-logs\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.850515 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-config-data\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.850553 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.952184 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndp5j\" (UniqueName: \"kubernetes.io/projected/d255cc24-14bd-4114-938b-c91acbe5c5d2-kube-api-access-ndp5j\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.952753 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: 
\"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.952968 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d255cc24-14bd-4114-938b-c91acbe5c5d2-logs\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.953137 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-config-data\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.953303 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.953986 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d255cc24-14bd-4114-938b-c91acbe5c5d2-logs\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.959122 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.960176 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.960982 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d255cc24-14bd-4114-938b-c91acbe5c5d2-config-data\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:32 crc kubenswrapper[4788]: I1211 09:47:32.972197 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndp5j\" (UniqueName: \"kubernetes.io/projected/d255cc24-14bd-4114-938b-c91acbe5c5d2-kube-api-access-ndp5j\") pod \"nova-metadata-0\" (UID: \"d255cc24-14bd-4114-938b-c91acbe5c5d2\") " pod="openstack/nova-metadata-0" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.053847 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.494273 4788 scope.go:117] "RemoveContainer" containerID="da468444b2e89b4e5c979b6fc23b53adfbc0421fa24be7380f1ef7bee21f1e56" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.643436 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.682838 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.682846 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"63b89091-40fe-446f-9f84-b6a2b0e98bad","Type":"ContainerDied","Data":"4cae3b52bbed7518224d32a0a59ee69d411eb0b9f3608b30c09283e31af8391a"} Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769395 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769513 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769551 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769576 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh8hw\" (UniqueName: \"kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769722 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.769785 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs\") pod \"63b89091-40fe-446f-9f84-b6a2b0e98bad\" (UID: \"63b89091-40fe-446f-9f84-b6a2b0e98bad\") " Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.771005 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs" (OuterVolumeSpecName: "logs") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.789792 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw" (OuterVolumeSpecName: "kube-api-access-nh8hw") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "kube-api-access-nh8hw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.798235 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.813366 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data" (OuterVolumeSpecName: "config-data") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.826346 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.831760 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "63b89091-40fe-446f-9f84-b6a2b0e98bad" (UID: "63b89091-40fe-446f-9f84-b6a2b0e98bad"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.871977 4788 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.872026 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.872040 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.872053 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh8hw\" (UniqueName: \"kubernetes.io/projected/63b89091-40fe-446f-9f84-b6a2b0e98bad-kube-api-access-nh8hw\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.872070 4788 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63b89091-40fe-446f-9f84-b6a2b0e98bad-logs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.872080 4788 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/63b89091-40fe-446f-9f84-b6a2b0e98bad-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:33 crc kubenswrapper[4788]: I1211 09:47:33.989466 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-metadata-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.028067 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.046598 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.060285 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: E1211 09:47:34.060925 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-log" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.060955 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-log" Dec 11 09:47:34 crc kubenswrapper[4788]: E1211 09:47:34.060968 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-api" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.060976 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-api" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.061201 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-api" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.061266 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" containerName="nova-api-log" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.062722 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.066753 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.066931 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.066979 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.069530 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.177603 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.177903 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-public-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.177982 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-config-data\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " 
pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.178044 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d35e32a-7f87-4a69-9233-7d8bb40fec75-logs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.178414 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67h8l\" (UniqueName: \"kubernetes.io/projected/0d35e32a-7f87-4a69-9233-7d8bb40fec75-kube-api-access-67h8l\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.179090 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.281101 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.281594 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-public-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.281771 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-config-data\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.281916 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d35e32a-7f87-4a69-9233-7d8bb40fec75-logs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.282127 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67h8l\" (UniqueName: \"kubernetes.io/projected/0d35e32a-7f87-4a69-9233-7d8bb40fec75-kube-api-access-67h8l\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.282315 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d35e32a-7f87-4a69-9233-7d8bb40fec75-logs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.282412 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " 
pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.285734 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.287832 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.288394 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-public-tls-certs\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.293226 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d35e32a-7f87-4a69-9233-7d8bb40fec75-config-data\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.304351 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67h8l\" (UniqueName: \"kubernetes.io/projected/0d35e32a-7f87-4a69-9233-7d8bb40fec75-kube-api-access-67h8l\") pod \"nova-api-0\" (UID: \"0d35e32a-7f87-4a69-9233-7d8bb40fec75\") " pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.384922 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.494396 4788 scope.go:117] "RemoveContainer" containerID="11316b361358252fa7d5cd100d8b9918617959201f5220da5b45190788e88d7c" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.516428 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63b89091-40fe-446f-9f84-b6a2b0e98bad" path="/var/lib/kubelet/pods/63b89091-40fe-446f-9f84-b6a2b0e98bad/volumes" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.517115 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89b62ded-d0fb-4df2-b6a0-1c72d1575865" path="/var/lib/kubelet/pods/89b62ded-d0fb-4df2-b6a0-1c72d1575865/volumes" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.537594 4788 scope.go:117] "RemoveContainer" containerID="b9a6698f8582f18125b1130c02a41edb0e0621af50b40bf019c02f468558ddcd" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.707941 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d255cc24-14bd-4114-938b-c91acbe5c5d2","Type":"ContainerStarted","Data":"7b8df6bdb87dab99775c927676ca50a8b42e38ca682e3138113085c3ae5dea12"} Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.713455 4788 generic.go:334] "Generic (PLEG): container finished" podID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" exitCode=0 Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.713572 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"68d2836e-dfca-46dd-bfc5-58f6eaf649e5","Type":"ContainerDied","Data":"e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe"} Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.713688 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.974954 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.528285077 podStartE2EDuration="17.974936427s" podCreationTimestamp="2025-12-11 09:47:17 +0000 UTC" firstStartedPulling="2025-12-11 09:47:18.698981452 +0000 UTC m=+1568.769761038" lastFinishedPulling="2025-12-11 09:47:32.145632802 +0000 UTC m=+1582.216412388" observedRunningTime="2025-12-11 09:47:34.752533048 +0000 UTC m=+1584.823312634" watchObservedRunningTime="2025-12-11 09:47:34.974936427 +0000 UTC m=+1585.045716003" Dec 11 09:47:34 crc kubenswrapper[4788]: I1211 09:47:34.982168 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 11 09:47:34 crc kubenswrapper[4788]: W1211 09:47:34.986840 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d35e32a_7f87_4a69_9233_7d8bb40fec75.slice/crio-4ebf46a13c8ebab1688c6a7009d7a4ef93b900faf22a7ed943862fc98a2e9863 WatchSource:0}: Error finding container 4ebf46a13c8ebab1688c6a7009d7a4ef93b900faf22a7ed943862fc98a2e9863: Status 404 returned error can't find the container with id 4ebf46a13c8ebab1688c6a7009d7a4ef93b900faf22a7ed943862fc98a2e9863 Dec 11 09:47:35 crc kubenswrapper[4788]: I1211 09:47:35.730579 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d255cc24-14bd-4114-938b-c91acbe5c5d2","Type":"ContainerStarted","Data":"0f0b037a5e1aad3ffb7f4cec5b8cc65ce7a179823b9964c96e758e086452e127"} Dec 11 09:47:35 crc 
kubenswrapper[4788]: I1211 09:47:35.733831 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0d35e32a-7f87-4a69-9233-7d8bb40fec75","Type":"ContainerStarted","Data":"61b82aa23c3091d703d3792953b89fb0fa68ae1e45e753b8289c44c08b460d95"} Dec 11 09:47:35 crc kubenswrapper[4788]: I1211 09:47:35.733997 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0d35e32a-7f87-4a69-9233-7d8bb40fec75","Type":"ContainerStarted","Data":"4ebf46a13c8ebab1688c6a7009d7a4ef93b900faf22a7ed943862fc98a2e9863"} Dec 11 09:47:35 crc kubenswrapper[4788]: I1211 09:47:35.922889 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.025395 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle\") pod \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.025617 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6z5z\" (UniqueName: \"kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z\") pod \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.026342 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data\") pod \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\" (UID: \"68d2836e-dfca-46dd-bfc5-58f6eaf649e5\") " Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.035855 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z" (OuterVolumeSpecName: "kube-api-access-m6z5z") pod "68d2836e-dfca-46dd-bfc5-58f6eaf649e5" (UID: "68d2836e-dfca-46dd-bfc5-58f6eaf649e5"). InnerVolumeSpecName "kube-api-access-m6z5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.058514 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data" (OuterVolumeSpecName: "config-data") pod "68d2836e-dfca-46dd-bfc5-58f6eaf649e5" (UID: "68d2836e-dfca-46dd-bfc5-58f6eaf649e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.075087 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68d2836e-dfca-46dd-bfc5-58f6eaf649e5" (UID: "68d2836e-dfca-46dd-bfc5-58f6eaf649e5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.129463 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.129499 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6z5z\" (UniqueName: \"kubernetes.io/projected/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-kube-api-access-m6z5z\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.129511 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68d2836e-dfca-46dd-bfc5-58f6eaf649e5-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.747036 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"68d2836e-dfca-46dd-bfc5-58f6eaf649e5","Type":"ContainerDied","Data":"e9bb4a320d843a954577b3ba14ac1ba5193d99165f88a4b4c47732a45484e3e0"} Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.747057 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.747368 4788 scope.go:117] "RemoveContainer" containerID="e0f97be702b47265d3e4eca39e4de98853c1b9a74166cc83dd5bc5d893c9c2fe" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.749940 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d255cc24-14bd-4114-938b-c91acbe5c5d2","Type":"ContainerStarted","Data":"081989ce95626ba55ebdcbfb434104a0fb1072bc685ddb344120f9dceb74216f"} Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.753861 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0d35e32a-7f87-4a69-9233-7d8bb40fec75","Type":"ContainerStarted","Data":"3f121d66eb5523c87f59a060c78793a03375babdad762f3196b23b0b786b3d8b"} Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.790525 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.790313291 podStartE2EDuration="4.790313291s" podCreationTimestamp="2025-12-11 09:47:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:36.7816239 +0000 UTC m=+1586.852403486" watchObservedRunningTime="2025-12-11 09:47:36.790313291 +0000 UTC m=+1586.861092877" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.815092 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.829242 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.840090 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:36 crc kubenswrapper[4788]: E1211 09:47:36.840808 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.840831 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" Dec 11 
09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.841080 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" containerName="nova-scheduler-scheduler" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.842665 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.842643795 podStartE2EDuration="2.842643795s" podCreationTimestamp="2025-12-11 09:47:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:36.819734681 +0000 UTC m=+1586.890514277" watchObservedRunningTime="2025-12-11 09:47:36.842643795 +0000 UTC m=+1586.913423391" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.843916 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-config-data\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.844026 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpthq\" (UniqueName: \"kubernetes.io/projected/b97d64be-b549-4abc-92a4-be155a300b1a-kube-api-access-xpthq\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.844136 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.844815 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.850728 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.864048 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.947439 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-config-data\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.947983 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpthq\" (UniqueName: \"kubernetes.io/projected/b97d64be-b549-4abc-92a4-be155a300b1a-kube-api-access-xpthq\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.948216 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc 
kubenswrapper[4788]: I1211 09:47:36.952371 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-config-data\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.953116 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d64be-b549-4abc-92a4-be155a300b1a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:36 crc kubenswrapper[4788]: I1211 09:47:36.969712 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpthq\" (UniqueName: \"kubernetes.io/projected/b97d64be-b549-4abc-92a4-be155a300b1a-kube-api-access-xpthq\") pod \"nova-scheduler-0\" (UID: \"b97d64be-b549-4abc-92a4-be155a300b1a\") " pod="openstack/nova-scheduler-0" Dec 11 09:47:37 crc kubenswrapper[4788]: I1211 09:47:37.169857 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 11 09:47:37 crc kubenswrapper[4788]: I1211 09:47:37.629938 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 11 09:47:37 crc kubenswrapper[4788]: I1211 09:47:37.764751 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b97d64be-b549-4abc-92a4-be155a300b1a","Type":"ContainerStarted","Data":"5af028d3d953d7fbcfa5fc13cb6815eb18a904b1595bee9cb0f91cb872df37f5"} Dec 11 09:47:38 crc kubenswrapper[4788]: I1211 09:47:38.054567 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:47:38 crc kubenswrapper[4788]: I1211 09:47:38.054942 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 11 09:47:38 crc kubenswrapper[4788]: I1211 09:47:38.508588 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68d2836e-dfca-46dd-bfc5-58f6eaf649e5" path="/var/lib/kubelet/pods/68d2836e-dfca-46dd-bfc5-58f6eaf649e5/volumes" Dec 11 09:47:38 crc kubenswrapper[4788]: I1211 09:47:38.776495 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b97d64be-b549-4abc-92a4-be155a300b1a","Type":"ContainerStarted","Data":"3f9a7ba7503f1a1695ead5eb3404c094da76339f35c224703a7b5758e7143cb1"} Dec 11 09:47:38 crc kubenswrapper[4788]: I1211 09:47:38.802209 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.802183164 podStartE2EDuration="2.802183164s" podCreationTimestamp="2025-12-11 09:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:47:38.791389569 +0000 UTC m=+1588.862169165" watchObservedRunningTime="2025-12-11 09:47:38.802183164 +0000 UTC m=+1588.872962750" Dec 11 09:47:42 crc kubenswrapper[4788]: I1211 09:47:42.170355 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 11 09:47:43 crc kubenswrapper[4788]: I1211 09:47:43.054581 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 11 09:47:43 crc kubenswrapper[4788]: I1211 09:47:43.054669 4788 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 11 09:47:43 crc kubenswrapper[4788]: I1211 09:47:43.713308 4788 scope.go:117] "RemoveContainer" containerID="86b19325e48b31462b96a8958c158830421c81882b80a1a572c29d6fff67e72f" Dec 11 09:47:44 crc kubenswrapper[4788]: I1211 09:47:44.068469 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d255cc24-14bd-4114-938b-c91acbe5c5d2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:44 crc kubenswrapper[4788]: I1211 09:47:44.068532 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d255cc24-14bd-4114-938b-c91acbe5c5d2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:44 crc kubenswrapper[4788]: I1211 09:47:44.387339 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:47:44 crc kubenswrapper[4788]: I1211 09:47:44.387384 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 11 09:47:45 crc kubenswrapper[4788]: I1211 09:47:45.391738 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0d35e32a-7f87-4a69-9233-7d8bb40fec75" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:45 crc kubenswrapper[4788]: I1211 09:47:45.396658 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0d35e32a-7f87-4a69-9233-7d8bb40fec75" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 11 09:47:47 crc kubenswrapper[4788]: I1211 09:47:47.170683 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 11 09:47:47 crc kubenswrapper[4788]: I1211 09:47:47.207948 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 11 09:47:47 crc kubenswrapper[4788]: I1211 09:47:47.902413 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 11 09:47:48 crc kubenswrapper[4788]: I1211 09:47:48.191060 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.369448 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.370148 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 
09:47:51.370199 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.370899 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.370974 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" gracePeriod=600 Dec 11 09:47:51 crc kubenswrapper[4788]: E1211 09:47:51.494740 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.918293 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" exitCode=0 Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.918367 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1"} Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.918446 4788 scope.go:117] "RemoveContainer" containerID="049e54d0d48c2b5f9432cbc0d1c729dca1d9ca44054024186e494a3f6fa981a9" Dec 11 09:47:51 crc kubenswrapper[4788]: I1211 09:47:51.919141 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:47:51 crc kubenswrapper[4788]: E1211 09:47:51.919439 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:47:53 crc kubenswrapper[4788]: I1211 09:47:53.062905 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 11 09:47:53 crc kubenswrapper[4788]: I1211 09:47:53.064223 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 11 09:47:53 crc kubenswrapper[4788]: I1211 09:47:53.088804 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 11 09:47:53 crc kubenswrapper[4788]: I1211 09:47:53.950799 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-metadata-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.393095 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.393565 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.394802 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.399471 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.960539 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 11 09:47:54 crc kubenswrapper[4788]: I1211 09:47:54.996521 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 11 09:48:03 crc kubenswrapper[4788]: I1211 09:48:03.592542 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:04 crc kubenswrapper[4788]: I1211 09:48:04.801853 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:06 crc kubenswrapper[4788]: I1211 09:48:06.496291 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:48:06 crc kubenswrapper[4788]: E1211 09:48:06.496692 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:48:08 crc kubenswrapper[4788]: I1211 09:48:08.175183 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" containerID="cri-o://26eb33385d08486834954168c890727574be50e64a22de92cf4ff8c02dac8bdd" gracePeriod=604796 Dec 11 09:48:08 crc kubenswrapper[4788]: I1211 09:48:08.836980 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 11 09:48:09 crc kubenswrapper[4788]: I1211 09:48:09.134020 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="rabbitmq" containerID="cri-o://0f19bd5a6a95b298d169849946fedcf95c25c2983d7367c81b8e61cc54a61939" gracePeriod=604796 Dec 11 09:48:18 crc kubenswrapper[4788]: I1211 09:48:18.836998 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 11 09:48:18 crc kubenswrapper[4788]: I1211 09:48:18.849613 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" 
containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.187457 4788 generic.go:334] "Generic (PLEG): container finished" podID="ef72aa19-1387-4180-957c-4bfec95e5562" containerID="26eb33385d08486834954168c890727574be50e64a22de92cf4ff8c02dac8bdd" exitCode=0 Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.187499 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerDied","Data":"26eb33385d08486834954168c890727574be50e64a22de92cf4ff8c02dac8bdd"} Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.280915 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.399413 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.399878 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.399936 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400013 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sq5f\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400077 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400125 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400156 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400244 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400266 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400305 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400363 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data\") pod \"ef72aa19-1387-4180-957c-4bfec95e5562\" (UID: \"ef72aa19-1387-4180-957c-4bfec95e5562\") " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.400644 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.401042 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.401274 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.401461 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.401473 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.401485 4788 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.408747 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info" (OuterVolumeSpecName: "pod-info") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.409975 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.425486 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.425969 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f" (OuterVolumeSpecName: "kube-api-access-5sq5f") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "kube-api-access-5sq5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.426326 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.451297 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data" (OuterVolumeSpecName: "config-data") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.477113 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf" (OuterVolumeSpecName: "server-conf") pod "ef72aa19-1387-4180-957c-4bfec95e5562" (UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.496130 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:48:19 crc kubenswrapper[4788]: E1211 09:48:19.496488 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506022 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506067 4788 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef72aa19-1387-4180-957c-4bfec95e5562-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506081 4788 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-server-conf\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506118 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506135 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef72aa19-1387-4180-957c-4bfec95e5562-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506147 4788 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef72aa19-1387-4180-957c-4bfec95e5562-pod-info\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.506160 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sq5f\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-kube-api-access-5sq5f\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.544069 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.552543 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ef72aa19-1387-4180-957c-4bfec95e5562" 
(UID: "ef72aa19-1387-4180-957c-4bfec95e5562"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.607737 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:19 crc kubenswrapper[4788]: I1211 09:48:19.607773 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef72aa19-1387-4180-957c-4bfec95e5562-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.202444 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef72aa19-1387-4180-957c-4bfec95e5562","Type":"ContainerDied","Data":"a8a1557262d2a19aba2965b49acc4e2ac64997e43396e16737d0fd93869979f1"} Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.202556 4788 scope.go:117] "RemoveContainer" containerID="26eb33385d08486834954168c890727574be50e64a22de92cf4ff8c02dac8bdd" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.202592 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.240022 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.250346 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.253185 4788 scope.go:117] "RemoveContainer" containerID="ecdda98068d7c12b5c293a43670f5aa25750a921ae152c57a3c48bdaf6f07e5d" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.272034 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:20 crc kubenswrapper[4788]: E1211 09:48:20.272505 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.272519 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" Dec 11 09:48:20 crc kubenswrapper[4788]: E1211 09:48:20.272542 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="setup-container" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.272549 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="setup-container" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.272845 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" containerName="rabbitmq" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.275157 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.278751 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nhvl8" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.278983 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.279141 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.279295 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.279409 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.279550 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.279693 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.289519 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.424896 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425386 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425453 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpvmf\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-kube-api-access-mpvmf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425486 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425517 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425727 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-config-data\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425834 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.425937 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.426106 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.426157 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2e0d2c67-915a-4461-ab83-75e349c18950-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.426213 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2e0d2c67-915a-4461-ab83-75e349c18950-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.507158 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef72aa19-1387-4180-957c-4bfec95e5562" path="/var/lib/kubelet/pods/ef72aa19-1387-4180-957c-4bfec95e5562/volumes" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527663 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527729 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2e0d2c67-915a-4461-ab83-75e349c18950-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527775 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2e0d2c67-915a-4461-ab83-75e349c18950-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527870 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527903 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527936 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpvmf\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-kube-api-access-mpvmf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527962 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.527990 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.528041 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-config-data\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.528076 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.528116 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.528602 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.529021 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " 
pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.529153 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.529528 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.530321 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-config-data\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.533059 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2e0d2c67-915a-4461-ab83-75e349c18950-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.544561 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.544816 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2e0d2c67-915a-4461-ab83-75e349c18950-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.545344 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.545408 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2e0d2c67-915a-4461-ab83-75e349c18950-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.549214 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpvmf\" (UniqueName: \"kubernetes.io/projected/2e0d2c67-915a-4461-ab83-75e349c18950-kube-api-access-mpvmf\") pod \"rabbitmq-server-0\" (UID: \"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.566708 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: 
\"2e0d2c67-915a-4461-ab83-75e349c18950\") " pod="openstack/rabbitmq-server-0" Dec 11 09:48:20 crc kubenswrapper[4788]: I1211 09:48:20.634388 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:25.257525 4788 generic.go:334] "Generic (PLEG): container finished" podID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerID="0f19bd5a6a95b298d169849946fedcf95c25c2983d7367c81b8e61cc54a61939" exitCode=0 Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:25.257631 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerDied","Data":"0f19bd5a6a95b298d169849946fedcf95c25c2983d7367c81b8e61cc54a61939"} Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.256051 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.258591 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.260804 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.276984 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369106 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369171 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2fg5\" (UniqueName: \"kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369238 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369281 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369297 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc 
kubenswrapper[4788]: I1211 09:48:27.369331 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.369433 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.471963 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472033 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472082 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2fg5\" (UniqueName: \"kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472137 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472171 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472190 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.472248 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 
09:48:27.473557 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.474274 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.474906 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.475435 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.475476 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.476148 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.497589 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2fg5\" (UniqueName: \"kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5\") pod \"dnsmasq-dns-d558885bc-g85c7\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.584918 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:27 crc kubenswrapper[4788]: I1211 09:48:27.851042 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.012576 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086312 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086553 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086685 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm9g5\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086753 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086793 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086817 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.086916 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087001 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087046 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087068 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: 
\"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087125 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info\") pod \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\" (UID: \"c377af1f-84a0-4b96-8fa2-0f66751cd40d\") " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087540 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.087838 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.090348 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.090616 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.093799 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info" (OuterVolumeSpecName: "pod-info") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.094386 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.094618 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.098625 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5" (OuterVolumeSpecName: "kube-api-access-jm9g5") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "kube-api-access-jm9g5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.098804 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.118949 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data" (OuterVolumeSpecName: "config-data") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.162536 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf" (OuterVolumeSpecName: "server-conf") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.191899 4788 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c377af1f-84a0-4b96-8fa2-0f66751cd40d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192158 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192219 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm9g5\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-kube-api-access-jm9g5\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192319 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192385 4788 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192784 4788 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c377af1f-84a0-4b96-8fa2-0f66751cd40d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192842 4788 reconciler_common.go:293] "Volume detached for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192890 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c377af1f-84a0-4b96-8fa2-0f66751cd40d-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.192946 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.223369 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.231330 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c377af1f-84a0-4b96-8fa2-0f66751cd40d" (UID: "c377af1f-84a0-4b96-8fa2-0f66751cd40d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.290565 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c377af1f-84a0-4b96-8fa2-0f66751cd40d","Type":"ContainerDied","Data":"fd3cbee5cff20b35471fce4d1947910e37802ad956352bd7958e417cf4c7289d"} Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.290918 4788 scope.go:117] "RemoveContainer" containerID="0f19bd5a6a95b298d169849946fedcf95c25c2983d7367c81b8e61cc54a61939" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.290597 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.292589 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2e0d2c67-915a-4461-ab83-75e349c18950","Type":"ContainerStarted","Data":"f89bdabc65fe72fb8e9fe0c7dedfb20bee001dcf787d7a621bf447054ff52e75"} Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.294663 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.294687 4788 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c377af1f-84a0-4b96-8fa2-0f66751cd40d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.314186 4788 scope.go:117] "RemoveContainer" containerID="512fd2cbdec642fe0e8d83e6ef7e1028976ab3008554b4f0c9736d8bf1910af3" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.330382 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.341442 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.372892 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:28 crc kubenswrapper[4788]: E1211 09:48:28.373452 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="rabbitmq" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.373469 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="rabbitmq" Dec 11 09:48:28 crc kubenswrapper[4788]: E1211 09:48:28.373496 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="setup-container" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.373505 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="setup-container" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.373741 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" containerName="rabbitmq" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.385111 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.387158 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.387701 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.387802 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.388007 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.389194 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dbpfl" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.389303 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.389443 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.394005 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397061 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/866f442b-155f-40fb-836d-3cc2add24e36-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397115 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397151 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397184 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397264 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397298 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397328 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6v4l\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-kube-api-access-d6v4l\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397369 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397416 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/866f442b-155f-40fb-836d-3cc2add24e36-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397463 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.397493 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.416771 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.499948 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/866f442b-155f-40fb-836d-3cc2add24e36-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500009 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500036 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500061 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/866f442b-155f-40fb-836d-3cc2add24e36-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500104 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500138 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500172 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500272 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500330 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500479 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6v4l\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-kube-api-access-d6v4l\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500501 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500909 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.500967 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.501163 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.501728 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/866f442b-155f-40fb-836d-3cc2add24e36-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.502024 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.502355 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.510455 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c377af1f-84a0-4b96-8fa2-0f66751cd40d" path="/var/lib/kubelet/pods/c377af1f-84a0-4b96-8fa2-0f66751cd40d/volumes" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.557427 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/866f442b-155f-40fb-836d-3cc2add24e36-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.559778 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/866f442b-155f-40fb-836d-3cc2add24e36-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.560085 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.560392 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 
09:48:28.571776 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6v4l\" (UniqueName: \"kubernetes.io/projected/866f442b-155f-40fb-836d-3cc2add24e36-kube-api-access-d6v4l\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.597020 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"866f442b-155f-40fb-836d-3cc2add24e36\") " pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:28 crc kubenswrapper[4788]: I1211 09:48:28.706545 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:48:29 crc kubenswrapper[4788]: I1211 09:48:29.223962 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 11 09:48:29 crc kubenswrapper[4788]: W1211 09:48:29.226402 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod866f442b_155f_40fb_836d_3cc2add24e36.slice/crio-68655f32350cf858a777920d431f400988f330caecda363bc3576fd37cc3b8e1 WatchSource:0}: Error finding container 68655f32350cf858a777920d431f400988f330caecda363bc3576fd37cc3b8e1: Status 404 returned error can't find the container with id 68655f32350cf858a777920d431f400988f330caecda363bc3576fd37cc3b8e1 Dec 11 09:48:29 crc kubenswrapper[4788]: I1211 09:48:29.315529 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"866f442b-155f-40fb-836d-3cc2add24e36","Type":"ContainerStarted","Data":"68655f32350cf858a777920d431f400988f330caecda363bc3576fd37cc3b8e1"} Dec 11 09:48:29 crc kubenswrapper[4788]: I1211 09:48:29.316867 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerStarted","Data":"c10dd56e627da41c587babee2410abdd81c2aa9218122f0a2cd9ea14fef71166"} Dec 11 09:48:29 crc kubenswrapper[4788]: I1211 09:48:29.316924 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerStarted","Data":"4e2046bc1fa3777c0ba80d296501d97f1cd6470102c11cde7bd847b5bc8dcd59"} Dec 11 09:48:30 crc kubenswrapper[4788]: I1211 09:48:30.330713 4788 generic.go:334] "Generic (PLEG): container finished" podID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerID="c10dd56e627da41c587babee2410abdd81c2aa9218122f0a2cd9ea14fef71166" exitCode=0 Dec 11 09:48:30 crc kubenswrapper[4788]: I1211 09:48:30.330775 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerDied","Data":"c10dd56e627da41c587babee2410abdd81c2aa9218122f0a2cd9ea14fef71166"} Dec 11 09:48:30 crc kubenswrapper[4788]: I1211 09:48:30.335729 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2e0d2c67-915a-4461-ab83-75e349c18950","Type":"ContainerStarted","Data":"7e9a24b58855bd16975d76cf56552ba438f5ba5b849549e204bfca5c8cfeb293"} Dec 11 09:48:30 crc kubenswrapper[4788]: I1211 09:48:30.504069 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 
11 09:48:30 crc kubenswrapper[4788]: E1211 09:48:30.504377 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:48:31 crc kubenswrapper[4788]: I1211 09:48:31.351786 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"866f442b-155f-40fb-836d-3cc2add24e36","Type":"ContainerStarted","Data":"ca47e2505cd13513bd18473b574cd1524c8c8ae23a26374be579fbe2f55aec3c"} Dec 11 09:48:31 crc kubenswrapper[4788]: I1211 09:48:31.354935 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerStarted","Data":"49b93e43623031a0ff23353d05009659d67068833373ca5a45503b1011b1e7db"} Dec 11 09:48:31 crc kubenswrapper[4788]: I1211 09:48:31.355055 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:31 crc kubenswrapper[4788]: I1211 09:48:31.418409 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d558885bc-g85c7" podStartSLOduration=4.418385503 podStartE2EDuration="4.418385503s" podCreationTimestamp="2025-12-11 09:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:48:31.410058191 +0000 UTC m=+1641.480837797" watchObservedRunningTime="2025-12-11 09:48:31.418385503 +0000 UTC m=+1641.489165099" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.587632 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.670725 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.671605 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerName="dnsmasq-dns" containerID="cri-o://783f9e601234bc83351dc3a3b2196f6f94db5ffa6aa7ebef4e8703035e95e1de" gracePeriod=10 Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.843556 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-7bm7g"] Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.854542 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.863346 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-7bm7g"] Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948627 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948719 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948738 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948793 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948844 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2km4h\" (UniqueName: \"kubernetes.io/projected/f620c56e-f069-4bd1-9dc9-092bf9beaf91-kube-api-access-2km4h\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948903 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:37 crc kubenswrapper[4788]: I1211 09:48:37.948941 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-config\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.050808 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-config\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051014 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051073 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051096 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051159 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051211 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2km4h\" (UniqueName: \"kubernetes.io/projected/f620c56e-f069-4bd1-9dc9-092bf9beaf91-kube-api-access-2km4h\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051290 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.051967 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-config\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.052102 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-sb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.052167 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-openstack-edpm-ipam\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.052515 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-ovsdbserver-nb\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.052521 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-swift-storage-0\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.052613 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f620c56e-f069-4bd1-9dc9-092bf9beaf91-dns-svc\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.079692 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2km4h\" (UniqueName: \"kubernetes.io/projected/f620c56e-f069-4bd1-9dc9-092bf9beaf91-kube-api-access-2km4h\") pod \"dnsmasq-dns-78c64bc9c5-7bm7g\" (UID: \"f620c56e-f069-4bd1-9dc9-092bf9beaf91\") " pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.184274 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.467157 4788 generic.go:334] "Generic (PLEG): container finished" podID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerID="783f9e601234bc83351dc3a3b2196f6f94db5ffa6aa7ebef4e8703035e95e1de" exitCode=0 Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.467296 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" event={"ID":"208b9146-8d17-4510-b547-b4ca9d7d6feb","Type":"ContainerDied","Data":"783f9e601234bc83351dc3a3b2196f6f94db5ffa6aa7ebef4e8703035e95e1de"} Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.651760 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78c64bc9c5-7bm7g"] Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.719213 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.877845 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.878027 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.878088 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.878213 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.878300 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjlsk\" (UniqueName: \"kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.878327 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc\") pod \"208b9146-8d17-4510-b547-b4ca9d7d6feb\" (UID: \"208b9146-8d17-4510-b547-b4ca9d7d6feb\") " Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.883891 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk" (OuterVolumeSpecName: "kube-api-access-kjlsk") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "kube-api-access-kjlsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.945198 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.946118 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.955648 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.970018 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.978990 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config" (OuterVolumeSpecName: "config") pod "208b9146-8d17-4510-b547-b4ca9d7d6feb" (UID: "208b9146-8d17-4510-b547-b4ca9d7d6feb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981803 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981856 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981870 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981883 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjlsk\" (UniqueName: \"kubernetes.io/projected/208b9146-8d17-4510-b547-b4ca9d7d6feb-kube-api-access-kjlsk\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981915 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:38 crc kubenswrapper[4788]: I1211 09:48:38.981925 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/208b9146-8d17-4510-b547-b4ca9d7d6feb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.479550 4788 generic.go:334] "Generic (PLEG): container finished" podID="f620c56e-f069-4bd1-9dc9-092bf9beaf91" containerID="3b865cf4e08a65127d31e9c13c9b5c1e321f9073b6a85bf243544eb1df6af185" exitCode=0 Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.479631 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" event={"ID":"f620c56e-f069-4bd1-9dc9-092bf9beaf91","Type":"ContainerDied","Data":"3b865cf4e08a65127d31e9c13c9b5c1e321f9073b6a85bf243544eb1df6af185"} Dec 11 
09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.479694 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" event={"ID":"f620c56e-f069-4bd1-9dc9-092bf9beaf91","Type":"ContainerStarted","Data":"9193c14c8c2db8800ee24dd6a61a27a15fbd5da5fa9491ba3a5e1f77538100b7"} Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.483379 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" event={"ID":"208b9146-8d17-4510-b547-b4ca9d7d6feb","Type":"ContainerDied","Data":"55ffb176a98e1b8baf3d4ebb9949f27909e0db019e8252bcae73330ee339803e"} Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.483460 4788 scope.go:117] "RemoveContainer" containerID="783f9e601234bc83351dc3a3b2196f6f94db5ffa6aa7ebef4e8703035e95e1de" Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.483476 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-872c7" Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.586167 4788 scope.go:117] "RemoveContainer" containerID="6faf8b5a46b4d36651ee5ba5f0bbe7ac0ff11f4c94c4ff4a2b39ac148ef8c095" Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.589163 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:48:39 crc kubenswrapper[4788]: I1211 09:48:39.602370 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-872c7"] Dec 11 09:48:40 crc kubenswrapper[4788]: I1211 09:48:40.509486 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" path="/var/lib/kubelet/pods/208b9146-8d17-4510-b547-b4ca9d7d6feb/volumes" Dec 11 09:48:40 crc kubenswrapper[4788]: I1211 09:48:40.510216 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" event={"ID":"f620c56e-f069-4bd1-9dc9-092bf9beaf91","Type":"ContainerStarted","Data":"3b578f12e7c3a91cf3a03133cb45e12a75c7f385655ca80eb0d83a75c385d08b"} Dec 11 09:48:40 crc kubenswrapper[4788]: I1211 09:48:40.529844 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" podStartSLOduration=3.529823394 podStartE2EDuration="3.529823394s" podCreationTimestamp="2025-12-11 09:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:48:40.526287964 +0000 UTC m=+1650.597067560" watchObservedRunningTime="2025-12-11 09:48:40.529823394 +0000 UTC m=+1650.600602980" Dec 11 09:48:41 crc kubenswrapper[4788]: I1211 09:48:41.508937 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:43 crc kubenswrapper[4788]: I1211 09:48:43.994958 4788 scope.go:117] "RemoveContainer" containerID="87b5bc57894128ff451222c65a4ef22a3dda5c56f40835cc288b55267938f418" Dec 11 09:48:44 crc kubenswrapper[4788]: I1211 09:48:44.029916 4788 scope.go:117] "RemoveContainer" containerID="0ed4348b9228569d67703a8fd887ba7da79f772c3db0497c59e8a0b0ff2d7465" Dec 11 09:48:45 crc kubenswrapper[4788]: I1211 09:48:45.496623 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:48:45 crc kubenswrapper[4788]: E1211 09:48:45.497902 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:48:48 crc kubenswrapper[4788]: I1211 09:48:48.186136 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78c64bc9c5-7bm7g" Dec 11 09:48:48 crc kubenswrapper[4788]: I1211 09:48:48.269833 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:48 crc kubenswrapper[4788]: I1211 09:48:48.270336 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d558885bc-g85c7" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="dnsmasq-dns" containerID="cri-o://49b93e43623031a0ff23353d05009659d67068833373ca5a45503b1011b1e7db" gracePeriod=10 Dec 11 09:48:48 crc kubenswrapper[4788]: I1211 09:48:48.720729 4788 generic.go:334] "Generic (PLEG): container finished" podID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerID="49b93e43623031a0ff23353d05009659d67068833373ca5a45503b1011b1e7db" exitCode=0 Dec 11 09:48:48 crc kubenswrapper[4788]: I1211 09:48:48.721111 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerDied","Data":"49b93e43623031a0ff23353d05009659d67068833373ca5a45503b1011b1e7db"} Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.609905 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.735133 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d558885bc-g85c7" event={"ID":"eb92cfd4-e88d-43ca-a9ad-23e054c57528","Type":"ContainerDied","Data":"4e2046bc1fa3777c0ba80d296501d97f1cd6470102c11cde7bd847b5bc8dcd59"} Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.735200 4788 scope.go:117] "RemoveContainer" containerID="49b93e43623031a0ff23353d05009659d67068833373ca5a45503b1011b1e7db" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.735249 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d558885bc-g85c7" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.747981 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748074 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748141 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2fg5\" (UniqueName: \"kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748211 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748382 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748444 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.748578 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb\") pod \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\" (UID: \"eb92cfd4-e88d-43ca-a9ad-23e054c57528\") " Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.754488 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5" (OuterVolumeSpecName: "kube-api-access-t2fg5") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "kube-api-access-t2fg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.780897 4788 scope.go:117] "RemoveContainer" containerID="c10dd56e627da41c587babee2410abdd81c2aa9218122f0a2cd9ea14fef71166" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.810199 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.810280 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.810722 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config" (OuterVolumeSpecName: "config") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.811658 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.812263 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.813196 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "eb92cfd4-e88d-43ca-a9ad-23e054c57528" (UID: "eb92cfd4-e88d-43ca-a9ad-23e054c57528"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851064 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851118 4788 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851134 4788 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851147 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2fg5\" (UniqueName: \"kubernetes.io/projected/eb92cfd4-e88d-43ca-a9ad-23e054c57528-kube-api-access-t2fg5\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851160 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851170 4788 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:49 crc kubenswrapper[4788]: I1211 09:48:49.851181 4788 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb92cfd4-e88d-43ca-a9ad-23e054c57528-config\") on node \"crc\" DevicePath \"\"" Dec 11 09:48:50 crc kubenswrapper[4788]: I1211 09:48:50.079798 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:50 crc kubenswrapper[4788]: I1211 09:48:50.092367 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d558885bc-g85c7"] Dec 11 09:48:50 crc kubenswrapper[4788]: I1211 09:48:50.508535 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" path="/var/lib/kubelet/pods/eb92cfd4-e88d-43ca-a9ad-23e054c57528/volumes" Dec 11 09:48:56 crc kubenswrapper[4788]: I1211 09:48:56.495630 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:48:56 crc kubenswrapper[4788]: E1211 09:48:56.496716 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.027883 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb"] Dec 11 09:48:57 crc kubenswrapper[4788]: E1211 09:48:57.028556 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" 
containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.028583 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: E1211 09:48:57.028637 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="init" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.028647 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="init" Dec 11 09:48:57 crc kubenswrapper[4788]: E1211 09:48:57.028664 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerName="init" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.028674 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerName="init" Dec 11 09:48:57 crc kubenswrapper[4788]: E1211 09:48:57.028706 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.028717 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.029012 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="208b9146-8d17-4510-b547-b4ca9d7d6feb" containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.029073 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb92cfd4-e88d-43ca-a9ad-23e054c57528" containerName="dnsmasq-dns" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.030044 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.032421 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.033811 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.034967 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.040582 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.052732 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb"] Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.211200 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.211710 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf5l7\" (UniqueName: \"kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.211810 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.211888 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.314636 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.314714 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.314846 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.314968 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf5l7\" (UniqueName: \"kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.321282 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.321752 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.322673 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.332942 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf5l7\" (UniqueName: \"kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.355988 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:48:57 crc kubenswrapper[4788]: I1211 09:48:57.946876 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb"] Dec 11 09:48:57 crc kubenswrapper[4788]: W1211 09:48:57.958713 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod888d4b4c_ac58_4ac8_8c53_fa41a750aaef.slice/crio-124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f WatchSource:0}: Error finding container 124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f: Status 404 returned error can't find the container with id 124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f Dec 11 09:48:58 crc kubenswrapper[4788]: I1211 09:48:58.854696 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" event={"ID":"888d4b4c-ac58-4ac8-8c53-fa41a750aaef","Type":"ContainerStarted","Data":"124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f"} Dec 11 09:49:02 crc kubenswrapper[4788]: I1211 09:49:02.896619 4788 generic.go:334] "Generic (PLEG): container finished" podID="2e0d2c67-915a-4461-ab83-75e349c18950" containerID="7e9a24b58855bd16975d76cf56552ba438f5ba5b849549e204bfca5c8cfeb293" exitCode=0 Dec 11 09:49:02 crc kubenswrapper[4788]: I1211 09:49:02.896716 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2e0d2c67-915a-4461-ab83-75e349c18950","Type":"ContainerDied","Data":"7e9a24b58855bd16975d76cf56552ba438f5ba5b849549e204bfca5c8cfeb293"} Dec 11 09:49:03 crc kubenswrapper[4788]: I1211 09:49:03.995687 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2e0d2c67-915a-4461-ab83-75e349c18950","Type":"ContainerStarted","Data":"164413ce1c09a794020665d2d36c508dd02645678fb567cb358bad88cd2c69a1"} Dec 11 09:49:03 crc kubenswrapper[4788]: I1211 09:49:03.996618 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 11 09:49:04 crc kubenswrapper[4788]: I1211 09:49:04.000440 4788 generic.go:334] "Generic (PLEG): container finished" podID="866f442b-155f-40fb-836d-3cc2add24e36" containerID="ca47e2505cd13513bd18473b574cd1524c8c8ae23a26374be579fbe2f55aec3c" exitCode=0 Dec 11 09:49:04 crc kubenswrapper[4788]: I1211 09:49:04.000493 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"866f442b-155f-40fb-836d-3cc2add24e36","Type":"ContainerDied","Data":"ca47e2505cd13513bd18473b574cd1524c8c8ae23a26374be579fbe2f55aec3c"} Dec 11 09:49:04 crc kubenswrapper[4788]: I1211 09:49:04.064306 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=44.064281007 podStartE2EDuration="44.064281007s" podCreationTimestamp="2025-12-11 09:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:49:04.037172626 +0000 UTC m=+1674.107952212" watchObservedRunningTime="2025-12-11 09:49:04.064281007 +0000 UTC m=+1674.135060593" Dec 11 09:49:05 crc kubenswrapper[4788]: I1211 09:49:05.022876 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"866f442b-155f-40fb-836d-3cc2add24e36","Type":"ContainerStarted","Data":"960f68d7bb15806bca5e264e067f5bc45ed9e27edb8497ea878664291cf5ac31"} Dec 11 09:49:05 crc kubenswrapper[4788]: I1211 09:49:05.023586 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:49:07 crc kubenswrapper[4788]: I1211 09:49:07.496095 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:49:07 crc kubenswrapper[4788]: E1211 09:49:07.496704 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:49:18 crc kubenswrapper[4788]: I1211 09:49:18.710563 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="866f442b-155f-40fb-836d-3cc2add24e36" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.210:5671: connect: connection refused" Dec 11 09:49:20 crc kubenswrapper[4788]: I1211 09:49:20.637686 4788 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="2e0d2c67-915a-4461-ab83-75e349c18950" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.208:5671: connect: connection refused" Dec 11 09:49:22 crc kubenswrapper[4788]: I1211 09:49:22.496764 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:49:22 crc kubenswrapper[4788]: E1211 09:49:22.498901 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:49:23 crc kubenswrapper[4788]: E1211 09:49:23.081184 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:642e3b59f86dee85a0b2a804fa14cdf4e3bee1cf" Dec 11 09:49:23 crc kubenswrapper[4788]: E1211 09:49:23.081289 4788 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:642e3b59f86dee85a0b2a804fa14cdf4e3bee1cf" Dec 11 09:49:23 crc kubenswrapper[4788]: E1211 09:49:23.081445 4788 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 11 09:49:23 crc kubenswrapper[4788]: container &Container{Name:repo-setup-edpm-deployment-openstack-edpm-ipam,Image:quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:642e3b59f86dee85a0b2a804fa14cdf4e3bee1cf,Command:[],Args:[ansible-runner run /runner -p playbook.yaml -i repo-setup-edpm-deployment-openstack-edpm-ipam],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ANSIBLE_VERBOSITY,Value:2,ValueFrom:nil,},EnvVar{Name:RUNNER_PLAYBOOK,Value: Dec 11 09:49:23 crc 
kubenswrapper[4788]: - hosts: all Dec 11 09:49:23 crc kubenswrapper[4788]: strategy: linear Dec 11 09:49:23 crc kubenswrapper[4788]: tasks: Dec 11 09:49:23 crc kubenswrapper[4788]: - name: Enable podified-repos Dec 11 09:49:23 crc kubenswrapper[4788]: become: true Dec 11 09:49:23 crc kubenswrapper[4788]: ansible.builtin.shell: | Dec 11 09:49:23 crc kubenswrapper[4788]: set -euxo pipefail Dec 11 09:49:23 crc kubenswrapper[4788]: pushd /var/tmp Dec 11 09:49:23 crc kubenswrapper[4788]: curl -sL https://github.com/openstack-k8s-operators/repo-setup/archive/refs/heads/main.tar.gz | tar -xz Dec 11 09:49:23 crc kubenswrapper[4788]: pushd repo-setup-main Dec 11 09:49:23 crc kubenswrapper[4788]: python3 -m venv ./venv Dec 11 09:49:23 crc kubenswrapper[4788]: PBR_VERSION=0.0.0 ./venv/bin/pip install ./ Dec 11 09:49:23 crc kubenswrapper[4788]: ./venv/bin/repo-setup current-podified -b antelope Dec 11 09:49:23 crc kubenswrapper[4788]: popd Dec 11 09:49:23 crc kubenswrapper[4788]: rm -rf repo-setup-main Dec 11 09:49:23 crc kubenswrapper[4788]: Dec 11 09:49:23 crc kubenswrapper[4788]: Dec 11 09:49:23 crc kubenswrapper[4788]: ,ValueFrom:nil,},EnvVar{Name:RUNNER_EXTRA_VARS,Value: Dec 11 09:49:23 crc kubenswrapper[4788]: edpm_override_hosts: openstack-edpm-ipam Dec 11 09:49:23 crc kubenswrapper[4788]: edpm_service_type: repo-setup Dec 11 09:49:23 crc kubenswrapper[4788]: Dec 11 09:49:23 crc kubenswrapper[4788]: Dec 11 09:49:23 crc kubenswrapper[4788]: ,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:repo-setup-combined-ca-bundle,ReadOnly:false,MountPath:/var/lib/openstack/cacerts/repo-setup,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/runner/env/ssh_key,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:inventory,ReadOnly:false,MountPath:/runner/inventory/hosts,SubPath:inventory,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zf5l7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:openstack-aee-default-env,},Optional:*true,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb_openstack(888d4b4c-ac58-4ac8-8c53-fa41a750aaef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Dec 11 09:49:23 crc kubenswrapper[4788]: > logger="UnhandledError" Dec 11 09:49:23 crc kubenswrapper[4788]: E1211 09:49:23.082798 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ErrImagePull: \"rpc error: 
code = Canceled desc = copying config: context canceled\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" podUID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" Dec 11 09:49:23 crc kubenswrapper[4788]: E1211 09:49:23.204823 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/openstack-k8s-operators/openstack-ansibleee-runner:642e3b59f86dee85a0b2a804fa14cdf4e3bee1cf\\\"\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" podUID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" Dec 11 09:49:23 crc kubenswrapper[4788]: I1211 09:49:23.232993 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=55.232972074 podStartE2EDuration="55.232972074s" podCreationTimestamp="2025-12-11 09:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 09:49:05.054360999 +0000 UTC m=+1675.125140655" watchObservedRunningTime="2025-12-11 09:49:23.232972074 +0000 UTC m=+1693.303751650" Dec 11 09:49:28 crc kubenswrapper[4788]: I1211 09:49:28.711161 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 11 09:49:30 crc kubenswrapper[4788]: I1211 09:49:30.639523 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 11 09:49:36 crc kubenswrapper[4788]: I1211 09:49:36.499583 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:49:36 crc kubenswrapper[4788]: I1211 09:49:36.690335 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:49:37 crc kubenswrapper[4788]: I1211 09:49:37.337477 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" event={"ID":"888d4b4c-ac58-4ac8-8c53-fa41a750aaef","Type":"ContainerStarted","Data":"278279dfdaed66d08df305ce8e529956fa597e39bf517f0e0ca10d20a7ebfc32"} Dec 11 09:49:37 crc kubenswrapper[4788]: I1211 09:49:37.366651 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" podStartSLOduration=2.641824868 podStartE2EDuration="41.366630478s" podCreationTimestamp="2025-12-11 09:48:56 +0000 UTC" firstStartedPulling="2025-12-11 09:48:57.962466077 +0000 UTC m=+1668.033245673" lastFinishedPulling="2025-12-11 09:49:36.687271697 +0000 UTC m=+1706.758051283" observedRunningTime="2025-12-11 09:49:37.357086855 +0000 UTC m=+1707.427866461" watchObservedRunningTime="2025-12-11 09:49:37.366630478 +0000 UTC m=+1707.437410064" Dec 11 09:49:37 crc kubenswrapper[4788]: I1211 09:49:37.496398 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:49:37 crc kubenswrapper[4788]: E1211 09:49:37.497053 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" 
podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:49:49 crc kubenswrapper[4788]: I1211 09:49:49.495585 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:49:49 crc kubenswrapper[4788]: E1211 09:49:49.496361 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:49:50 crc kubenswrapper[4788]: I1211 09:49:50.477804 4788 generic.go:334] "Generic (PLEG): container finished" podID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" containerID="278279dfdaed66d08df305ce8e529956fa597e39bf517f0e0ca10d20a7ebfc32" exitCode=0 Dec 11 09:49:50 crc kubenswrapper[4788]: I1211 09:49:50.477900 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" event={"ID":"888d4b4c-ac58-4ac8-8c53-fa41a750aaef","Type":"ContainerDied","Data":"278279dfdaed66d08df305ce8e529956fa597e39bf517f0e0ca10d20a7ebfc32"} Dec 11 09:49:51 crc kubenswrapper[4788]: I1211 09:49:51.917218 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.115160 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key\") pod \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.115356 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf5l7\" (UniqueName: \"kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7\") pod \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.115394 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory\") pod \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.115528 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle\") pod \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\" (UID: \"888d4b4c-ac58-4ac8-8c53-fa41a750aaef\") " Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.122319 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7" (OuterVolumeSpecName: "kube-api-access-zf5l7") pod "888d4b4c-ac58-4ac8-8c53-fa41a750aaef" (UID: "888d4b4c-ac58-4ac8-8c53-fa41a750aaef"). InnerVolumeSpecName "kube-api-access-zf5l7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.123845 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "888d4b4c-ac58-4ac8-8c53-fa41a750aaef" (UID: "888d4b4c-ac58-4ac8-8c53-fa41a750aaef"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.200317 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory" (OuterVolumeSpecName: "inventory") pod "888d4b4c-ac58-4ac8-8c53-fa41a750aaef" (UID: "888d4b4c-ac58-4ac8-8c53-fa41a750aaef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.204337 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "888d4b4c-ac58-4ac8-8c53-fa41a750aaef" (UID: "888d4b4c-ac58-4ac8-8c53-fa41a750aaef"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.219042 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.219635 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.219717 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf5l7\" (UniqueName: \"kubernetes.io/projected/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-kube-api-access-zf5l7\") on node \"crc\" DevicePath \"\"" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.219792 4788 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/888d4b4c-ac58-4ac8-8c53-fa41a750aaef-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.507779 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.509763 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb" event={"ID":"888d4b4c-ac58-4ac8-8c53-fa41a750aaef","Type":"ContainerDied","Data":"124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f"} Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.509810 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="124c318c49a76bd30638f63e588846fda505a36533ee82bd11aee93be29e2d0f" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.632267 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb"] Dec 11 09:49:52 crc kubenswrapper[4788]: E1211 09:49:52.632990 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.633068 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.633381 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="888d4b4c-ac58-4ac8-8c53-fa41a750aaef" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.634605 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.637323 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.638197 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.639039 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.639364 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.653559 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb"] Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.834204 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.834862 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnk2d\" (UniqueName: \"kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc 
kubenswrapper[4788]: I1211 09:49:52.835033 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.938288 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnk2d\" (UniqueName: \"kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.938389 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.938583 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.946301 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.949000 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.957780 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnk2d\" (UniqueName: \"kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phwpb\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:52 crc kubenswrapper[4788]: I1211 09:49:52.966194 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:49:53 crc kubenswrapper[4788]: I1211 09:49:53.561753 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb"] Dec 11 09:49:54 crc kubenswrapper[4788]: I1211 09:49:54.533106 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" event={"ID":"3b286cdf-f5f9-4ad2-ab37-0e4697309be5","Type":"ContainerStarted","Data":"df1dd63859e1a3dfcd8cdcb53565603bfa31313e7c8d3a1c631699ab6be12ff2"} Dec 11 09:49:55 crc kubenswrapper[4788]: I1211 09:49:55.545803 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" event={"ID":"3b286cdf-f5f9-4ad2-ab37-0e4697309be5","Type":"ContainerStarted","Data":"2f099543b70ecc8d76291c942cd4e4bdb01e33f456d53a1ab4c13596af3ec02d"} Dec 11 09:49:55 crc kubenswrapper[4788]: I1211 09:49:55.572190 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" podStartSLOduration=2.107170627 podStartE2EDuration="3.572147787s" podCreationTimestamp="2025-12-11 09:49:52 +0000 UTC" firstStartedPulling="2025-12-11 09:49:53.568501424 +0000 UTC m=+1723.639281010" lastFinishedPulling="2025-12-11 09:49:55.033478584 +0000 UTC m=+1725.104258170" observedRunningTime="2025-12-11 09:49:55.560586513 +0000 UTC m=+1725.631366099" watchObservedRunningTime="2025-12-11 09:49:55.572147787 +0000 UTC m=+1725.642927373" Dec 11 09:49:58 crc kubenswrapper[4788]: I1211 09:49:58.579221 4788 generic.go:334] "Generic (PLEG): container finished" podID="3b286cdf-f5f9-4ad2-ab37-0e4697309be5" containerID="2f099543b70ecc8d76291c942cd4e4bdb01e33f456d53a1ab4c13596af3ec02d" exitCode=0 Dec 11 09:49:58 crc kubenswrapper[4788]: I1211 09:49:58.579354 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" event={"ID":"3b286cdf-f5f9-4ad2-ab37-0e4697309be5","Type":"ContainerDied","Data":"2f099543b70ecc8d76291c942cd4e4bdb01e33f456d53a1ab4c13596af3ec02d"} Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.007311 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.107085 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key\") pod \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.107140 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory\") pod \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.107184 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnk2d\" (UniqueName: \"kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d\") pod \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\" (UID: \"3b286cdf-f5f9-4ad2-ab37-0e4697309be5\") " Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.113406 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d" (OuterVolumeSpecName: "kube-api-access-rnk2d") pod "3b286cdf-f5f9-4ad2-ab37-0e4697309be5" (UID: "3b286cdf-f5f9-4ad2-ab37-0e4697309be5"). InnerVolumeSpecName "kube-api-access-rnk2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.140185 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory" (OuterVolumeSpecName: "inventory") pod "3b286cdf-f5f9-4ad2-ab37-0e4697309be5" (UID: "3b286cdf-f5f9-4ad2-ab37-0e4697309be5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.140667 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3b286cdf-f5f9-4ad2-ab37-0e4697309be5" (UID: "3b286cdf-f5f9-4ad2-ab37-0e4697309be5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.209476 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.209518 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnk2d\" (UniqueName: \"kubernetes.io/projected/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-kube-api-access-rnk2d\") on node \"crc\" DevicePath \"\"" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.209531 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b286cdf-f5f9-4ad2-ab37-0e4697309be5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.503382 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:50:00 crc kubenswrapper[4788]: E1211 09:50:00.504007 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.600434 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" event={"ID":"3b286cdf-f5f9-4ad2-ab37-0e4697309be5","Type":"ContainerDied","Data":"df1dd63859e1a3dfcd8cdcb53565603bfa31313e7c8d3a1c631699ab6be12ff2"} Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.600485 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df1dd63859e1a3dfcd8cdcb53565603bfa31313e7c8d3a1c631699ab6be12ff2" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.600553 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phwpb" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.683397 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g"] Dec 11 09:50:00 crc kubenswrapper[4788]: E1211 09:50:00.684095 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b286cdf-f5f9-4ad2-ab37-0e4697309be5" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.684124 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b286cdf-f5f9-4ad2-ab37-0e4697309be5" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.684404 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b286cdf-f5f9-4ad2-ab37-0e4697309be5" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.685315 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.689831 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.689904 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.690173 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.690928 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.698783 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g"] Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.822771 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.822850 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj6jg\" (UniqueName: \"kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.822982 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.823180 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.925715 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.925846 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.925980 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.926023 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj6jg\" (UniqueName: \"kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.931441 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.934110 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.934780 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:00 crc kubenswrapper[4788]: I1211 09:50:00.949543 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj6jg\" (UniqueName: \"kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:01 crc kubenswrapper[4788]: I1211 09:50:01.013709 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:50:01 crc kubenswrapper[4788]: I1211 09:50:01.528926 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g"] Dec 11 09:50:01 crc kubenswrapper[4788]: I1211 09:50:01.611548 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" event={"ID":"f6a40a8b-5427-40c5-b48b-18df0deb1e39","Type":"ContainerStarted","Data":"21c76eef0543ebfa5f9d31799ce5f75a69c7a8ec689c322d445561d0defcfd37"} Dec 11 09:50:05 crc kubenswrapper[4788]: I1211 09:50:05.652820 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" event={"ID":"f6a40a8b-5427-40c5-b48b-18df0deb1e39","Type":"ContainerStarted","Data":"c47288228b4dbe37c81f701a0b64f632a74cf4520d8cc54bab6f78051e2a032a"} Dec 11 09:50:05 crc kubenswrapper[4788]: I1211 09:50:05.680863 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" podStartSLOduration=2.945378202 podStartE2EDuration="5.680844674s" podCreationTimestamp="2025-12-11 09:50:00 +0000 UTC" firstStartedPulling="2025-12-11 09:50:01.535121398 +0000 UTC m=+1731.605900984" lastFinishedPulling="2025-12-11 09:50:04.27058787 +0000 UTC m=+1734.341367456" observedRunningTime="2025-12-11 09:50:05.678396482 +0000 UTC m=+1735.749176068" watchObservedRunningTime="2025-12-11 09:50:05.680844674 +0000 UTC m=+1735.751624260" Dec 11 09:50:12 crc kubenswrapper[4788]: I1211 09:50:12.496193 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:50:12 crc kubenswrapper[4788]: E1211 09:50:12.497183 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:50:26 crc kubenswrapper[4788]: I1211 09:50:26.496170 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:50:26 crc kubenswrapper[4788]: E1211 09:50:26.498841 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:50:41 crc kubenswrapper[4788]: I1211 09:50:41.495968 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:50:41 crc kubenswrapper[4788]: E1211 09:50:41.496776 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:50:44 crc kubenswrapper[4788]: I1211 09:50:44.288132 4788 scope.go:117] "RemoveContainer" containerID="d6d56553bff5cb3c9407e9f4faa59907e6c863edd3b016168a4d3db8354cef48" Dec 11 09:50:44 crc kubenswrapper[4788]: I1211 09:50:44.341727 4788 scope.go:117] "RemoveContainer" containerID="99543d6c3a77fd0ecc62fb13a4a683462c46fc9d0453d2bf1fddc07a1f282ef1" Dec 11 09:50:56 crc kubenswrapper[4788]: I1211 09:50:56.496067 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:50:56 crc kubenswrapper[4788]: E1211 09:50:56.498009 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:51:07 crc kubenswrapper[4788]: I1211 09:51:07.495642 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:51:07 crc kubenswrapper[4788]: E1211 09:51:07.496476 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:51:18 crc kubenswrapper[4788]: I1211 09:51:18.495761 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:51:18 crc kubenswrapper[4788]: E1211 09:51:18.496517 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:51:29 crc kubenswrapper[4788]: I1211 09:51:29.495513 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:51:29 crc kubenswrapper[4788]: E1211 09:51:29.496405 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:51:39 crc kubenswrapper[4788]: I1211 09:51:39.046749 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-9a29-account-create-update-qv8wk"] Dec 11 09:51:39 crc kubenswrapper[4788]: I1211 09:51:39.057178 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-jl2kd"] Dec 11 09:51:39 crc kubenswrapper[4788]: I1211 
09:51:39.066420 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-9a29-account-create-update-qv8wk"] Dec 11 09:51:39 crc kubenswrapper[4788]: I1211 09:51:39.075406 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-jl2kd"] Dec 11 09:51:40 crc kubenswrapper[4788]: I1211 09:51:40.515328 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="776102e1-f9bb-46db-bf9e-b3171a8d64fa" path="/var/lib/kubelet/pods/776102e1-f9bb-46db-bf9e-b3171a8d64fa/volumes" Dec 11 09:51:40 crc kubenswrapper[4788]: I1211 09:51:40.516497 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d15af45-a8f2-4689-a633-82abfcd03bae" path="/var/lib/kubelet/pods/8d15af45-a8f2-4689-a633-82abfcd03bae/volumes" Dec 11 09:51:41 crc kubenswrapper[4788]: I1211 09:51:41.496626 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:51:41 crc kubenswrapper[4788]: E1211 09:51:41.497054 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.041155 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-s98g2"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.056484 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-s98g2"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.070815 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-26ad-account-create-update-q7plk"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.082687 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-mlxpj"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.094366 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-26ad-account-create-update-q7plk"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.108249 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-mlxpj"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.120074 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-5ce6-account-create-update-4dlzg"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.132332 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-5ce6-account-create-update-4dlzg"] Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.510961 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="638ccdd0-d9e8-4608-9eb9-eb3abebae04d" path="/var/lib/kubelet/pods/638ccdd0-d9e8-4608-9eb9-eb3abebae04d/volumes" Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.512657 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02" path="/var/lib/kubelet/pods/7a40a6f3-d7b0-4064-9e58-aea4c9a8bd02/volumes" Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.513397 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac89feb3-6e9d-4d4e-bfbc-313328583a65" 
path="/var/lib/kubelet/pods/ac89feb3-6e9d-4d4e-bfbc-313328583a65/volumes" Dec 11 09:51:42 crc kubenswrapper[4788]: I1211 09:51:42.514137 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bceadd92-112d-46ef-bfa8-4fd844c01ebf" path="/var/lib/kubelet/pods/bceadd92-112d-46ef-bfa8-4fd844c01ebf/volumes" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.390656 4788 scope.go:117] "RemoveContainer" containerID="ec51dad2a93538ebcb2573c2a4b32c519ff312933347e7af2b685035037750c8" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.441825 4788 scope.go:117] "RemoveContainer" containerID="ff25c34ffa144204887874c798aa10d9d501f8ab34e811a4b67e67006e1559c3" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.486843 4788 scope.go:117] "RemoveContainer" containerID="8ac0b13f2c9eb948be42f6eed506390e7465eda044ed69b7f08b60752cf99e63" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.530528 4788 scope.go:117] "RemoveContainer" containerID="e4481afdbb3294155dce49e6a29f6a575c6a692d7b5f5f554c98bb8013149901" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.583076 4788 scope.go:117] "RemoveContainer" containerID="300d96478e62c8fae7b5ec769f1ba6cca4a1d1e3686c00d0a3abccd805eb1e86" Dec 11 09:51:44 crc kubenswrapper[4788]: I1211 09:51:44.637750 4788 scope.go:117] "RemoveContainer" containerID="da74c77e7b5a3e51c3553089c60f51237fa7228e2ea0e205ca94c39810e79262" Dec 11 09:51:53 crc kubenswrapper[4788]: I1211 09:51:53.496454 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:51:53 crc kubenswrapper[4788]: E1211 09:51:53.497296 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:52:08 crc kubenswrapper[4788]: I1211 09:52:08.497158 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:52:08 crc kubenswrapper[4788]: E1211 09:52:08.500274 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:52:21 crc kubenswrapper[4788]: I1211 09:52:21.496700 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:52:21 crc kubenswrapper[4788]: E1211 09:52:21.497572 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.058287 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-db-create-hfflh"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.075763 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5988-account-create-update-lfdwj"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.087907 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-hfflh"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.100618 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5988-account-create-update-lfdwj"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.109308 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-z48xt"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.117822 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-297b-account-create-update-4fsxw"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.125986 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-v4wgz"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.135301 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-z48xt"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.144913 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-75ef-account-create-update-k799k"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.154175 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-297b-account-create-update-4fsxw"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.164602 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-v4wgz"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.174339 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-75ef-account-create-update-k799k"] Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.508736 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c51946a-22cd-4576-8c5d-a37ec2746c2e" path="/var/lib/kubelet/pods/0c51946a-22cd-4576-8c5d-a37ec2746c2e/volumes" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.509643 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c99537b-1ec9-4cf3-8fc8-12466dcca552" path="/var/lib/kubelet/pods/1c99537b-1ec9-4cf3-8fc8-12466dcca552/volumes" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.510376 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d51ecca-226a-42a2-ad6c-249628ee18b7" path="/var/lib/kubelet/pods/3d51ecca-226a-42a2-ad6c-249628ee18b7/volumes" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.511168 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d73232b-acdd-4586-a02d-e2f22ce3e0fa" path="/var/lib/kubelet/pods/7d73232b-acdd-4586-a02d-e2f22ce3e0fa/volumes" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.512512 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="827fe8ce-f75e-4f71-b2d6-5d791513c568" path="/var/lib/kubelet/pods/827fe8ce-f75e-4f71-b2d6-5d791513c568/volumes" Dec 11 09:52:22 crc kubenswrapper[4788]: I1211 09:52:22.513309 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed92d625-0e3b-43c8-9a8b-c228c701bcba" path="/var/lib/kubelet/pods/ed92d625-0e3b-43c8-9a8b-c228c701bcba/volumes" Dec 11 09:52:28 crc kubenswrapper[4788]: I1211 09:52:28.066444 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-5xddv"] 
Dec 11 09:52:28 crc kubenswrapper[4788]: I1211 09:52:28.077141 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-5xddv"] Dec 11 09:52:28 crc kubenswrapper[4788]: I1211 09:52:28.516088 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97b7b5ca-b073-49a3-9dc5-adce0490e178" path="/var/lib/kubelet/pods/97b7b5ca-b073-49a3-9dc5-adce0490e178/volumes" Dec 11 09:52:34 crc kubenswrapper[4788]: I1211 09:52:34.495931 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:52:34 crc kubenswrapper[4788]: E1211 09:52:34.496817 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:52:44 crc kubenswrapper[4788]: I1211 09:52:44.764297 4788 scope.go:117] "RemoveContainer" containerID="d43cb1543e46a6eb2467ef962404dcf11cd186351ebd33a7e0fe1260bd58c483" Dec 11 09:52:44 crc kubenswrapper[4788]: I1211 09:52:44.808256 4788 scope.go:117] "RemoveContainer" containerID="72fc5943315c2041e20bcf5ae019cd3bd4440e10eeabb9eb4303e55d716145d6" Dec 11 09:52:44 crc kubenswrapper[4788]: I1211 09:52:44.846395 4788 scope.go:117] "RemoveContainer" containerID="b198e42c1dd45ce312a2f26ca1e387b7f2620b5d4497a25c9c2fef3d916bde4d" Dec 11 09:52:44 crc kubenswrapper[4788]: I1211 09:52:44.921522 4788 scope.go:117] "RemoveContainer" containerID="d5786b3fa9a6548c1a2aa04befb2e46173d79c9b07e494e81d325c104e7d172a" Dec 11 09:52:44 crc kubenswrapper[4788]: I1211 09:52:44.949154 4788 scope.go:117] "RemoveContainer" containerID="b7b8821bb730ec9501afae94f6a6cec12b6ed8470388fa4974538c738a70194d" Dec 11 09:52:45 crc kubenswrapper[4788]: I1211 09:52:45.012355 4788 scope.go:117] "RemoveContainer" containerID="2d136c1b53ae68cb4ce06cf91963a9afda464820ec3bd40ae0502caee8ea0fc4" Dec 11 09:52:45 crc kubenswrapper[4788]: I1211 09:52:45.058852 4788 scope.go:117] "RemoveContainer" containerID="8fa6913348e837373aee881b298c02ed31980a4df254562f431cabb1a4f7309d" Dec 11 09:52:48 crc kubenswrapper[4788]: I1211 09:52:48.496515 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:52:48 crc kubenswrapper[4788]: E1211 09:52:48.497384 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:53:01 crc kubenswrapper[4788]: I1211 09:53:01.496092 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:53:02 crc kubenswrapper[4788]: I1211 09:53:02.335199 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8"} Dec 11 09:53:25 crc 
kubenswrapper[4788]: I1211 09:53:25.556634 4788 generic.go:334] "Generic (PLEG): container finished" podID="f6a40a8b-5427-40c5-b48b-18df0deb1e39" containerID="c47288228b4dbe37c81f701a0b64f632a74cf4520d8cc54bab6f78051e2a032a" exitCode=0 Dec 11 09:53:25 crc kubenswrapper[4788]: I1211 09:53:25.556725 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" event={"ID":"f6a40a8b-5427-40c5-b48b-18df0deb1e39","Type":"ContainerDied","Data":"c47288228b4dbe37c81f701a0b64f632a74cf4520d8cc54bab6f78051e2a032a"} Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.331643 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.423966 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory\") pod \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.424013 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle\") pod \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.424158 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jj6jg\" (UniqueName: \"kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg\") pod \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.424277 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key\") pod \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\" (UID: \"f6a40a8b-5427-40c5-b48b-18df0deb1e39\") " Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.430786 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg" (OuterVolumeSpecName: "kube-api-access-jj6jg") pod "f6a40a8b-5427-40c5-b48b-18df0deb1e39" (UID: "f6a40a8b-5427-40c5-b48b-18df0deb1e39"). InnerVolumeSpecName "kube-api-access-jj6jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.431949 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f6a40a8b-5427-40c5-b48b-18df0deb1e39" (UID: "f6a40a8b-5427-40c5-b48b-18df0deb1e39"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.455243 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f6a40a8b-5427-40c5-b48b-18df0deb1e39" (UID: "f6a40a8b-5427-40c5-b48b-18df0deb1e39"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.455735 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory" (OuterVolumeSpecName: "inventory") pod "f6a40a8b-5427-40c5-b48b-18df0deb1e39" (UID: "f6a40a8b-5427-40c5-b48b-18df0deb1e39"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.527150 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jj6jg\" (UniqueName: \"kubernetes.io/projected/f6a40a8b-5427-40c5-b48b-18df0deb1e39-kube-api-access-jj6jg\") on node \"crc\" DevicePath \"\"" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.527251 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.527268 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.527281 4788 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a40a8b-5427-40c5-b48b-18df0deb1e39-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.687471 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d"] Dec 11 09:53:27 crc kubenswrapper[4788]: E1211 09:53:27.688313 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a40a8b-5427-40c5-b48b-18df0deb1e39" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.688332 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a40a8b-5427-40c5-b48b-18df0deb1e39" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.688809 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6a40a8b-5427-40c5-b48b-18df0deb1e39" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.689746 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.718477 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d"] Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.730711 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.730800 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f7g4\" (UniqueName: \"kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.731063 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.833568 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f7g4\" (UniqueName: \"kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.833677 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.833778 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.838016 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.838408 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.855951 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f7g4\" (UniqueName: \"kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.935005 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" event={"ID":"f6a40a8b-5427-40c5-b48b-18df0deb1e39","Type":"ContainerDied","Data":"21c76eef0543ebfa5f9d31799ce5f75a69c7a8ec689c322d445561d0defcfd37"} Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.935051 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21c76eef0543ebfa5f9d31799ce5f75a69c7a8ec689c322d445561d0defcfd37" Dec 11 09:53:27 crc kubenswrapper[4788]: I1211 09:53:27.935063 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g" Dec 11 09:53:28 crc kubenswrapper[4788]: I1211 09:53:28.017476 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:53:28 crc kubenswrapper[4788]: I1211 09:53:28.595724 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d"] Dec 11 09:53:28 crc kubenswrapper[4788]: I1211 09:53:28.946885 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" event={"ID":"8cceb164-ffe1-4a11-83e9-888f72ad58f0","Type":"ContainerStarted","Data":"e5e5f55f1c7db1ca272a17fdd4dacb8c8e738baaa8b597d26f9c04a40b349d47"} Dec 11 09:53:29 crc kubenswrapper[4788]: I1211 09:53:29.957462 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" event={"ID":"8cceb164-ffe1-4a11-83e9-888f72ad58f0","Type":"ContainerStarted","Data":"f930dd98c57a0e87fc74e31389f7a1e524e5a045e3227a0b5ca8956ca384b64d"} Dec 11 09:53:29 crc kubenswrapper[4788]: I1211 09:53:29.979915 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" podStartSLOduration=2.762509009 podStartE2EDuration="2.979895201s" podCreationTimestamp="2025-12-11 09:53:27 +0000 UTC" firstStartedPulling="2025-12-11 09:53:28.602983037 +0000 UTC m=+1938.673762623" lastFinishedPulling="2025-12-11 09:53:28.820369229 +0000 UTC m=+1938.891148815" observedRunningTime="2025-12-11 09:53:29.972379609 +0000 UTC m=+1940.043159205" watchObservedRunningTime="2025-12-11 09:53:29.979895201 +0000 UTC m=+1940.050674777" Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.044321 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-qfqsj"] Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.053964 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/placement-db-sync-tq2kw"] Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.090073 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-qfqsj"] Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.100440 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-tq2kw"] Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.507663 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2f70f9c-2c54-4a03-adbc-af34bf91a63b" path="/var/lib/kubelet/pods/d2f70f9c-2c54-4a03-adbc-af34bf91a63b/volumes" Dec 11 09:53:40 crc kubenswrapper[4788]: I1211 09:53:40.508396 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed0ae66f-f028-4fd2-9617-37f1148ec651" path="/var/lib/kubelet/pods/ed0ae66f-f028-4fd2-9617-37f1148ec651/volumes" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.525205 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.527529 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.536922 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.656444 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wfhj\" (UniqueName: \"kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.656575 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.656623 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.758718 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wfhj\" (UniqueName: \"kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.758827 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.758902 4788 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.759693 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.760102 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.789298 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wfhj\" (UniqueName: \"kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj\") pod \"certified-operators-rf478\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:42 crc kubenswrapper[4788]: I1211 09:53:42.854609 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:53:43 crc kubenswrapper[4788]: I1211 09:53:43.436832 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.133017 4788 generic.go:334] "Generic (PLEG): container finished" podID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerID="3a4bf6f8bbe14018ea73861e1f1997316585c585a2c7b3897960db6ba64f4bbc" exitCode=0 Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.133129 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerDied","Data":"3a4bf6f8bbe14018ea73861e1f1997316585c585a2c7b3897960db6ba64f4bbc"} Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.133166 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerStarted","Data":"3196e3828f94ec3a3fff29f55f0c90116d3e3c41558b0a04a39ccf6a256bc3d5"} Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.931435 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.941173 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:44 crc kubenswrapper[4788]: I1211 09:53:44.942956 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.009293 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.009381 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76fbb\" (UniqueName: \"kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.009857 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.112010 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.112086 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76fbb\" (UniqueName: \"kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.112214 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.112608 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.112667 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.135606 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-76fbb\" (UniqueName: \"kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb\") pod \"redhat-marketplace-nv99f\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.237560 4788 scope.go:117] "RemoveContainer" containerID="ec200f33775fdcd3eabce8f962f5c0e6efc74869f3e1d59025dd4044cc0ed420" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.276939 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.320621 4788 scope.go:117] "RemoveContainer" containerID="0aaa8107acc882b556c26a9d7b90f94920650804284bf1b4bb9f0c8b89899851" Dec 11 09:53:45 crc kubenswrapper[4788]: I1211 09:53:45.978264 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:53:45 crc kubenswrapper[4788]: W1211 09:53:45.991761 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeddab76a_ffc7_4c44_b0bf_65e38cf830b6.slice/crio-a2f46c3cfc8a222bba583f60dfd3760ab0f48ff3a7bfc1fbcf757e0af0f0925f WatchSource:0}: Error finding container a2f46c3cfc8a222bba583f60dfd3760ab0f48ff3a7bfc1fbcf757e0af0f0925f: Status 404 returned error can't find the container with id a2f46c3cfc8a222bba583f60dfd3760ab0f48ff3a7bfc1fbcf757e0af0f0925f Dec 11 09:53:46 crc kubenswrapper[4788]: I1211 09:53:46.156901 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerStarted","Data":"a2f46c3cfc8a222bba583f60dfd3760ab0f48ff3a7bfc1fbcf757e0af0f0925f"} Dec 11 09:53:47 crc kubenswrapper[4788]: I1211 09:53:47.170251 4788 generic.go:334] "Generic (PLEG): container finished" podID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerID="6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e" exitCode=0 Dec 11 09:53:47 crc kubenswrapper[4788]: I1211 09:53:47.170358 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerDied","Data":"6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e"} Dec 11 09:53:49 crc kubenswrapper[4788]: I1211 09:53:49.036575 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-bkcpt"] Dec 11 09:53:49 crc kubenswrapper[4788]: I1211 09:53:49.046039 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-bkcpt"] Dec 11 09:53:50 crc kubenswrapper[4788]: I1211 09:53:50.507209 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="539632da-1b63-429c-9d5c-3be34a9457ad" path="/var/lib/kubelet/pods/539632da-1b63-429c-9d5c-3be34a9457ad/volumes" Dec 11 09:53:51 crc kubenswrapper[4788]: I1211 09:53:51.211777 4788 generic.go:334] "Generic (PLEG): container finished" podID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerID="1e3ebdd4464cadb2dd9277be12572b50ef79c44bc51ecfeb251b615395216b1b" exitCode=0 Dec 11 09:53:51 crc kubenswrapper[4788]: I1211 09:53:51.211865 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" 
event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerDied","Data":"1e3ebdd4464cadb2dd9277be12572b50ef79c44bc51ecfeb251b615395216b1b"} Dec 11 09:53:51 crc kubenswrapper[4788]: I1211 09:53:51.215067 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerStarted","Data":"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f"} Dec 11 09:53:55 crc kubenswrapper[4788]: I1211 09:53:55.281613 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerStarted","Data":"20b80b8bcd33abb3f0d53447d58c64b096264d67adf657bbb0f694333160bc41"} Dec 11 09:53:55 crc kubenswrapper[4788]: I1211 09:53:55.285866 4788 generic.go:334] "Generic (PLEG): container finished" podID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerID="37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f" exitCode=0 Dec 11 09:53:55 crc kubenswrapper[4788]: I1211 09:53:55.285911 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerDied","Data":"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f"} Dec 11 09:53:55 crc kubenswrapper[4788]: I1211 09:53:55.306623 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rf478" podStartSLOduration=2.5582763440000003 podStartE2EDuration="13.306594359s" podCreationTimestamp="2025-12-11 09:53:42 +0000 UTC" firstStartedPulling="2025-12-11 09:53:44.136606053 +0000 UTC m=+1954.207385639" lastFinishedPulling="2025-12-11 09:53:54.884924058 +0000 UTC m=+1964.955703654" observedRunningTime="2025-12-11 09:53:55.302195307 +0000 UTC m=+1965.372974913" watchObservedRunningTime="2025-12-11 09:53:55.306594359 +0000 UTC m=+1965.377373945" Dec 11 09:53:56 crc kubenswrapper[4788]: I1211 09:53:56.296965 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerStarted","Data":"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4"} Dec 11 09:53:56 crc kubenswrapper[4788]: I1211 09:53:56.321787 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nv99f" podStartSLOduration=6.506842678 podStartE2EDuration="12.321768732s" podCreationTimestamp="2025-12-11 09:53:44 +0000 UTC" firstStartedPulling="2025-12-11 09:53:49.945478295 +0000 UTC m=+1960.016257881" lastFinishedPulling="2025-12-11 09:53:55.760404349 +0000 UTC m=+1965.831183935" observedRunningTime="2025-12-11 09:53:56.319095214 +0000 UTC m=+1966.389874800" watchObservedRunningTime="2025-12-11 09:53:56.321768732 +0000 UTC m=+1966.392548318" Dec 11 09:54:02 crc kubenswrapper[4788]: I1211 09:54:02.855275 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:54:02 crc kubenswrapper[4788]: I1211 09:54:02.857135 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:54:02 crc kubenswrapper[4788]: I1211 09:54:02.911119 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rf478" Dec 11 
09:54:03 crc kubenswrapper[4788]: I1211 09:54:03.405438 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rf478" Dec 11 09:54:03 crc kubenswrapper[4788]: I1211 09:54:03.472735 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 09:54:03 crc kubenswrapper[4788]: I1211 09:54:03.524666 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:54:03 crc kubenswrapper[4788]: I1211 09:54:03.524887 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dx56s" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="registry-server" containerID="cri-o://cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2" gracePeriod=2 Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.042702 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.056655 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-8k66n"] Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.081770 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-8k66n"] Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.232737 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content\") pod \"8c91a81d-da68-4da8-9081-38be3d8f3213\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.232969 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndgdh\" (UniqueName: \"kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh\") pod \"8c91a81d-da68-4da8-9081-38be3d8f3213\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.233125 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities\") pod \"8c91a81d-da68-4da8-9081-38be3d8f3213\" (UID: \"8c91a81d-da68-4da8-9081-38be3d8f3213\") " Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.234683 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities" (OuterVolumeSpecName: "utilities") pod "8c91a81d-da68-4da8-9081-38be3d8f3213" (UID: "8c91a81d-da68-4da8-9081-38be3d8f3213"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.238955 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh" (OuterVolumeSpecName: "kube-api-access-ndgdh") pod "8c91a81d-da68-4da8-9081-38be3d8f3213" (UID: "8c91a81d-da68-4da8-9081-38be3d8f3213"). InnerVolumeSpecName "kube-api-access-ndgdh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.300428 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c91a81d-da68-4da8-9081-38be3d8f3213" (UID: "8c91a81d-da68-4da8-9081-38be3d8f3213"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.335627 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.335677 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndgdh\" (UniqueName: \"kubernetes.io/projected/8c91a81d-da68-4da8-9081-38be3d8f3213-kube-api-access-ndgdh\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.335692 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c91a81d-da68-4da8-9081-38be3d8f3213-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.371404 4788 generic.go:334] "Generic (PLEG): container finished" podID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerID="cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2" exitCode=0 Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.372549 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dx56s" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.384508 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerDied","Data":"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2"} Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.385247 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dx56s" event={"ID":"8c91a81d-da68-4da8-9081-38be3d8f3213","Type":"ContainerDied","Data":"6945395b276c8dea07782c25b50f1b890e983bfdeb3fa9274da77700b9c5a4bd"} Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.385295 4788 scope.go:117] "RemoveContainer" containerID="cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.430019 4788 scope.go:117] "RemoveContainer" containerID="725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.433334 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.443732 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dx56s"] Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.488322 4788 scope.go:117] "RemoveContainer" containerID="f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.507258 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" path="/var/lib/kubelet/pods/8c91a81d-da68-4da8-9081-38be3d8f3213/volumes" Dec 11 
09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.508137 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed639405-5b9d-491f-b673-3d0657a14ecf" path="/var/lib/kubelet/pods/ed639405-5b9d-491f-b673-3d0657a14ecf/volumes" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.512525 4788 scope.go:117] "RemoveContainer" containerID="cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2" Dec 11 09:54:04 crc kubenswrapper[4788]: E1211 09:54:04.512952 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2\": container with ID starting with cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2 not found: ID does not exist" containerID="cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.513019 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2"} err="failed to get container status \"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2\": rpc error: code = NotFound desc = could not find container \"cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2\": container with ID starting with cbb24f88a31f515664ed8c0510c192764d3dc33974b54cd57d047475c351eac2 not found: ID does not exist" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.513062 4788 scope.go:117] "RemoveContainer" containerID="725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b" Dec 11 09:54:04 crc kubenswrapper[4788]: E1211 09:54:04.513580 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b\": container with ID starting with 725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b not found: ID does not exist" containerID="725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.513610 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b"} err="failed to get container status \"725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b\": rpc error: code = NotFound desc = could not find container \"725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b\": container with ID starting with 725d73f431f3eebd1af03aac5b5a0b216bba6dacbbf19c9cf446899388afda5b not found: ID does not exist" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.513632 4788 scope.go:117] "RemoveContainer" containerID="f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7" Dec 11 09:54:04 crc kubenswrapper[4788]: E1211 09:54:04.513969 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7\": container with ID starting with f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7 not found: ID does not exist" containerID="f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7" Dec 11 09:54:04 crc kubenswrapper[4788]: I1211 09:54:04.514014 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7"} err="failed to get container status \"f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7\": rpc error: code = NotFound desc = could not find container \"f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7\": container with ID starting with f5fa1136d21b5b7879f56289ccbc6cbbaa4e9e6c1e1d54377bb2c569e9a6b7b7 not found: ID does not exist" Dec 11 09:54:05 crc kubenswrapper[4788]: I1211 09:54:05.277361 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:05 crc kubenswrapper[4788]: I1211 09:54:05.277421 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:05 crc kubenswrapper[4788]: I1211 09:54:05.343187 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:05 crc kubenswrapper[4788]: I1211 09:54:05.429750 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:07 crc kubenswrapper[4788]: I1211 09:54:07.351383 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:54:07 crc kubenswrapper[4788]: I1211 09:54:07.400326 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nv99f" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="registry-server" containerID="cri-o://9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4" gracePeriod=2 Dec 11 09:54:07 crc kubenswrapper[4788]: I1211 09:54:07.846854 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.013045 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76fbb\" (UniqueName: \"kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb\") pod \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.013139 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities\") pod \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.013272 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content\") pod \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\" (UID: \"eddab76a-ffc7-4c44-b0bf-65e38cf830b6\") " Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.014158 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities" (OuterVolumeSpecName: "utilities") pod "eddab76a-ffc7-4c44-b0bf-65e38cf830b6" (UID: "eddab76a-ffc7-4c44-b0bf-65e38cf830b6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.021458 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb" (OuterVolumeSpecName: "kube-api-access-76fbb") pod "eddab76a-ffc7-4c44-b0bf-65e38cf830b6" (UID: "eddab76a-ffc7-4c44-b0bf-65e38cf830b6"). InnerVolumeSpecName "kube-api-access-76fbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.041058 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eddab76a-ffc7-4c44-b0bf-65e38cf830b6" (UID: "eddab76a-ffc7-4c44-b0bf-65e38cf830b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.045149 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-scdh4"] Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.079755 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-scdh4"] Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.115680 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.115721 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.115835 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76fbb\" (UniqueName: \"kubernetes.io/projected/eddab76a-ffc7-4c44-b0bf-65e38cf830b6-kube-api-access-76fbb\") on node \"crc\" DevicePath \"\"" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.415242 4788 generic.go:334] "Generic (PLEG): container finished" podID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerID="9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4" exitCode=0 Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.415316 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv99f" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.415280 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerDied","Data":"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4"} Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.415441 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv99f" event={"ID":"eddab76a-ffc7-4c44-b0bf-65e38cf830b6","Type":"ContainerDied","Data":"a2f46c3cfc8a222bba583f60dfd3760ab0f48ff3a7bfc1fbcf757e0af0f0925f"} Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.415493 4788 scope.go:117] "RemoveContainer" containerID="9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.449918 4788 scope.go:117] "RemoveContainer" containerID="37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.454824 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.463162 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv99f"] Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.485417 4788 scope.go:117] "RemoveContainer" containerID="6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.508409 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169bf402-16b5-4aa7-838a-094a2e4c3330" path="/var/lib/kubelet/pods/169bf402-16b5-4aa7-838a-094a2e4c3330/volumes" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.509176 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" path="/var/lib/kubelet/pods/eddab76a-ffc7-4c44-b0bf-65e38cf830b6/volumes" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.522144 4788 scope.go:117] "RemoveContainer" containerID="9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4" Dec 11 09:54:08 crc kubenswrapper[4788]: E1211 09:54:08.522778 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4\": container with ID starting with 9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4 not found: ID does not exist" containerID="9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.522883 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4"} err="failed to get container status \"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4\": rpc error: code = NotFound desc = could not find container \"9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4\": container with ID starting with 9770224522b5846119677c7b97d738cdf92f5fc1c6f44307faee7f2c32aa06c4 not found: ID does not exist" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.522968 4788 scope.go:117] "RemoveContainer" containerID="37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f" Dec 11 09:54:08 crc 
kubenswrapper[4788]: E1211 09:54:08.523388 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f\": container with ID starting with 37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f not found: ID does not exist" containerID="37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.523472 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f"} err="failed to get container status \"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f\": rpc error: code = NotFound desc = could not find container \"37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f\": container with ID starting with 37cfb98b5057208650eb70909a9093dc10a30d278f9df1295a8bdae28076096f not found: ID does not exist" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.523534 4788 scope.go:117] "RemoveContainer" containerID="6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e" Dec 11 09:54:08 crc kubenswrapper[4788]: E1211 09:54:08.524303 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e\": container with ID starting with 6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e not found: ID does not exist" containerID="6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e" Dec 11 09:54:08 crc kubenswrapper[4788]: I1211 09:54:08.524344 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e"} err="failed to get container status \"6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e\": rpc error: code = NotFound desc = could not find container \"6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e\": container with ID starting with 6294e1626f628fecb51f11a7f32e7d4281c90f51e9048589391067890f87595e not found: ID does not exist" Dec 11 09:54:16 crc kubenswrapper[4788]: I1211 09:54:16.032606 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bwvb7"] Dec 11 09:54:16 crc kubenswrapper[4788]: I1211 09:54:16.050655 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bwvb7"] Dec 11 09:54:16 crc kubenswrapper[4788]: I1211 09:54:16.509683 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32f6b143-7465-4311-85d7-d7668fde477c" path="/var/lib/kubelet/pods/32f6b143-7465-4311-85d7-d7668fde477c/volumes" Dec 11 09:54:45 crc kubenswrapper[4788]: I1211 09:54:45.420027 4788 scope.go:117] "RemoveContainer" containerID="ff2151bc49fd0cafd73c9fb2ceb0468e3d2614a208ebd9fe5817ecee62fe6e97" Dec 11 09:54:45 crc kubenswrapper[4788]: I1211 09:54:45.471825 4788 scope.go:117] "RemoveContainer" containerID="99d7855546af3787a9a2209a97ca65d78e41ec27d469523f594380049193aa2a" Dec 11 09:54:45 crc kubenswrapper[4788]: I1211 09:54:45.520940 4788 scope.go:117] "RemoveContainer" containerID="bc7fb4a80917212a2ef2937c19727eb2738a9430480dc85502c129458f85a317" Dec 11 09:54:45 crc kubenswrapper[4788]: I1211 09:54:45.568853 4788 scope.go:117] "RemoveContainer" containerID="86b8455f9f4b87b9234f4d8fe45e0647e65a9a6431e22dc6688e5d81c3af5966" 
Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.052914 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-dj9dl"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.064988 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-6h62d"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.075492 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9166-account-create-update-xhgcp"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.085633 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-26f5-account-create-update-25256"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.095215 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-5fjph"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.104511 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-12cc-account-create-update-58lfg"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.113283 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-dj9dl"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.121316 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-6h62d"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.128677 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9166-account-create-update-xhgcp"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.155295 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-12cc-account-create-update-58lfg"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.167279 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-26f5-account-create-update-25256"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.177264 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-5fjph"] Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.509380 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03b3633f-f696-4f45-be36-8cfb754dc8e1" path="/var/lib/kubelet/pods/03b3633f-f696-4f45-be36-8cfb754dc8e1/volumes" Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.510197 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04da6856-cf02-4691-9b29-7715109b1a69" path="/var/lib/kubelet/pods/04da6856-cf02-4691-9b29-7715109b1a69/volumes" Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.511013 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f1fbc52-492f-4e49-a8d9-b5ab20c0022f" path="/var/lib/kubelet/pods/0f1fbc52-492f-4e49-a8d9-b5ab20c0022f/volumes" Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.511679 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44711a12-aa33-4cf0-a92d-b0039f5ac809" path="/var/lib/kubelet/pods/44711a12-aa33-4cf0-a92d-b0039f5ac809/volumes" Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.513516 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44c661c8-dd11-4b37-9fbe-eead4f672645" path="/var/lib/kubelet/pods/44c661c8-dd11-4b37-9fbe-eead4f672645/volumes" Dec 11 09:55:00 crc kubenswrapper[4788]: I1211 09:55:00.514864 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4305d44-4730-4a83-b91f-b53bbff433bf" path="/var/lib/kubelet/pods/c4305d44-4730-4a83-b91f-b53bbff433bf/volumes" Dec 11 09:55:12 
crc kubenswrapper[4788]: I1211 09:55:12.008349 4788 generic.go:334] "Generic (PLEG): container finished" podID="8cceb164-ffe1-4a11-83e9-888f72ad58f0" containerID="f930dd98c57a0e87fc74e31389f7a1e524e5a045e3227a0b5ca8956ca384b64d" exitCode=0 Dec 11 09:55:12 crc kubenswrapper[4788]: I1211 09:55:12.008457 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" event={"ID":"8cceb164-ffe1-4a11-83e9-888f72ad58f0","Type":"ContainerDied","Data":"f930dd98c57a0e87fc74e31389f7a1e524e5a045e3227a0b5ca8956ca384b64d"} Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.568096 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.582899 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory\") pod \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.583039 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key\") pod \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.583273 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8f7g4\" (UniqueName: \"kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4\") pod \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\" (UID: \"8cceb164-ffe1-4a11-83e9-888f72ad58f0\") " Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.590848 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4" (OuterVolumeSpecName: "kube-api-access-8f7g4") pod "8cceb164-ffe1-4a11-83e9-888f72ad58f0" (UID: "8cceb164-ffe1-4a11-83e9-888f72ad58f0"). InnerVolumeSpecName "kube-api-access-8f7g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.622100 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8cceb164-ffe1-4a11-83e9-888f72ad58f0" (UID: "8cceb164-ffe1-4a11-83e9-888f72ad58f0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.623703 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory" (OuterVolumeSpecName: "inventory") pod "8cceb164-ffe1-4a11-83e9-888f72ad58f0" (UID: "8cceb164-ffe1-4a11-83e9-888f72ad58f0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.686684 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f7g4\" (UniqueName: \"kubernetes.io/projected/8cceb164-ffe1-4a11-83e9-888f72ad58f0-kube-api-access-8f7g4\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.686736 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:13 crc kubenswrapper[4788]: I1211 09:55:13.686747 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cceb164-ffe1-4a11-83e9-888f72ad58f0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.028150 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" event={"ID":"8cceb164-ffe1-4a11-83e9-888f72ad58f0","Type":"ContainerDied","Data":"e5e5f55f1c7db1ca272a17fdd4dacb8c8e738baaa8b597d26f9c04a40b349d47"} Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.028206 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5e5f55f1c7db1ca272a17fdd4dacb8c8e738baaa8b597d26f9c04a40b349d47" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.028405 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.128749 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk"] Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129193 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="extract-content" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129211 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="extract-content" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129241 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129247 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129268 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129274 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129294 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="extract-content" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129301 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="extract-content" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129312 4788 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="extract-utilities" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129321 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="extract-utilities" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129334 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cceb164-ffe1-4a11-83e9-888f72ad58f0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129341 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cceb164-ffe1-4a11-83e9-888f72ad58f0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 11 09:55:14 crc kubenswrapper[4788]: E1211 09:55:14.129348 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="extract-utilities" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129353 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="extract-utilities" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129536 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c91a81d-da68-4da8-9081-38be3d8f3213" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129559 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cceb164-ffe1-4a11-83e9-888f72ad58f0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.129572 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="eddab76a-ffc7-4c44-b0bf-65e38cf830b6" containerName="registry-server" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.130263 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.134681 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.134976 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.135336 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.135613 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.140713 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk"] Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.197848 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.197894 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.198262 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld52x\" (UniqueName: \"kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.300488 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.300556 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.300713 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld52x\" (UniqueName: \"kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.307338 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.309176 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.324356 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld52x\" (UniqueName: \"kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:14 crc kubenswrapper[4788]: I1211 09:55:14.446922 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:55:15 crc kubenswrapper[4788]: I1211 09:55:15.000954 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk"] Dec 11 09:55:15 crc kubenswrapper[4788]: I1211 09:55:15.006327 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 09:55:15 crc kubenswrapper[4788]: I1211 09:55:15.038051 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" event={"ID":"1203ac2a-acfa-4b1a-bba7-97eff5508d35","Type":"ContainerStarted","Data":"c5fd50d98dd8e89e38c8505451ebadac4c47fd6c3b6de4bebdc82d93fd84fdc4"} Dec 11 09:55:17 crc kubenswrapper[4788]: I1211 09:55:17.070148 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" event={"ID":"1203ac2a-acfa-4b1a-bba7-97eff5508d35","Type":"ContainerStarted","Data":"4824c9eb080f2cdefd6e35ab42afa69d5cbac9bbacd6888b86d73df380db91c9"} Dec 11 09:55:17 crc kubenswrapper[4788]: I1211 09:55:17.095095 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" podStartSLOduration=2.461733708 podStartE2EDuration="3.095076633s" podCreationTimestamp="2025-12-11 09:55:14 +0000 UTC" firstStartedPulling="2025-12-11 09:55:15.005998936 +0000 UTC m=+2045.076778522" lastFinishedPulling="2025-12-11 09:55:15.639341861 +0000 UTC m=+2045.710121447" observedRunningTime="2025-12-11 09:55:17.087204913 +0000 UTC m=+2047.157984499" watchObservedRunningTime="2025-12-11 09:55:17.095076633 +0000 UTC m=+2047.165856219" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.369630 4788 patch_prober.go:28] interesting 
pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.370180 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.647711 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.650542 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.665477 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.770878 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vlk5\" (UniqueName: \"kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.771172 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.771402 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.874313 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vlk5\" (UniqueName: \"kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.874433 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.874510 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " 
pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.875177 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.875297 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.909086 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vlk5\" (UniqueName: \"kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5\") pod \"redhat-operators-84vl5\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:21 crc kubenswrapper[4788]: I1211 09:55:21.978406 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:22 crc kubenswrapper[4788]: I1211 09:55:22.587047 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:23 crc kubenswrapper[4788]: I1211 09:55:23.127646 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerStarted","Data":"5ed227e4ca4a3e995077d26b23f6cd8b22e2f5a40730f7285c04a82c4991073a"} Dec 11 09:55:24 crc kubenswrapper[4788]: I1211 09:55:24.140282 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerStarted","Data":"0d8ee2fb0c394d511ff4590a8d365722ea0d55f33b742930488a92fab1082643"} Dec 11 09:55:25 crc kubenswrapper[4788]: I1211 09:55:25.151632 4788 generic.go:334] "Generic (PLEG): container finished" podID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerID="0d8ee2fb0c394d511ff4590a8d365722ea0d55f33b742930488a92fab1082643" exitCode=0 Dec 11 09:55:25 crc kubenswrapper[4788]: I1211 09:55:25.151705 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerDied","Data":"0d8ee2fb0c394d511ff4590a8d365722ea0d55f33b742930488a92fab1082643"} Dec 11 09:55:28 crc kubenswrapper[4788]: I1211 09:55:28.180211 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerStarted","Data":"dcbd1ebc550bce5cee9ae064abf624639213a7520690f286901580222986f3be"} Dec 11 09:55:36 crc kubenswrapper[4788]: I1211 09:55:36.253099 4788 generic.go:334] "Generic (PLEG): container finished" podID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerID="dcbd1ebc550bce5cee9ae064abf624639213a7520690f286901580222986f3be" exitCode=0 Dec 11 09:55:36 crc kubenswrapper[4788]: I1211 09:55:36.253141 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" 
event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerDied","Data":"dcbd1ebc550bce5cee9ae064abf624639213a7520690f286901580222986f3be"} Dec 11 09:55:38 crc kubenswrapper[4788]: I1211 09:55:38.276725 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerStarted","Data":"96a4fa5ac39c9c49c9a0eb1113c9454baf0efed634f606e7c91648b1bad6ade1"} Dec 11 09:55:38 crc kubenswrapper[4788]: I1211 09:55:38.304912 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-84vl5" podStartSLOduration=5.304853969 podStartE2EDuration="17.304890543s" podCreationTimestamp="2025-12-11 09:55:21 +0000 UTC" firstStartedPulling="2025-12-11 09:55:25.155631492 +0000 UTC m=+2055.226411078" lastFinishedPulling="2025-12-11 09:55:37.155668066 +0000 UTC m=+2067.226447652" observedRunningTime="2025-12-11 09:55:38.297534056 +0000 UTC m=+2068.368313642" watchObservedRunningTime="2025-12-11 09:55:38.304890543 +0000 UTC m=+2068.375670129" Dec 11 09:55:41 crc kubenswrapper[4788]: I1211 09:55:41.979140 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:41 crc kubenswrapper[4788]: I1211 09:55:41.979664 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:43 crc kubenswrapper[4788]: I1211 09:55:43.027727 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-84vl5" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="registry-server" probeResult="failure" output=< Dec 11 09:55:43 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 09:55:43 crc kubenswrapper[4788]: > Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.738083 4788 scope.go:117] "RemoveContainer" containerID="a626f043b768ee6c833701a226f907a61d2f8f6f59e57623ca2f9511843cdcc2" Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.766247 4788 scope.go:117] "RemoveContainer" containerID="7e85ba6862fe40433404b00293d9e988e3c34dd9089d09be6fed73687447f017" Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.818919 4788 scope.go:117] "RemoveContainer" containerID="60de6d6cb9a869f19f150f4fb24f6cf5211f00c18854517bbabd98513c15c10e" Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.862939 4788 scope.go:117] "RemoveContainer" containerID="4613b52a96d4be613e81ea879c29bdf45de16f2be884cd9fd1d1adb658548319" Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.946472 4788 scope.go:117] "RemoveContainer" containerID="d235260cd5be3c632bd85fef26825f1a0c9126dfc4b4edfb754cd66f5a8e40f1" Dec 11 09:55:45 crc kubenswrapper[4788]: I1211 09:55:45.983317 4788 scope.go:117] "RemoveContainer" containerID="7348082d43fafb12129c0829726e9c150022000d1c73684ee0c16b9144fdc518" Dec 11 09:55:51 crc kubenswrapper[4788]: I1211 09:55:51.368788 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:55:51 crc kubenswrapper[4788]: I1211 09:55:51.369409 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:55:52 crc kubenswrapper[4788]: I1211 09:55:52.032321 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:52 crc kubenswrapper[4788]: I1211 09:55:52.085044 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:52 crc kubenswrapper[4788]: I1211 09:55:52.848382 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:53 crc kubenswrapper[4788]: I1211 09:55:53.441541 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-84vl5" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="registry-server" containerID="cri-o://96a4fa5ac39c9c49c9a0eb1113c9454baf0efed634f606e7c91648b1bad6ade1" gracePeriod=2 Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.453735 4788 generic.go:334] "Generic (PLEG): container finished" podID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerID="96a4fa5ac39c9c49c9a0eb1113c9454baf0efed634f606e7c91648b1bad6ade1" exitCode=0 Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.454035 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerDied","Data":"96a4fa5ac39c9c49c9a0eb1113c9454baf0efed634f606e7c91648b1bad6ade1"} Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.454134 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-84vl5" event={"ID":"71acc9a2-6244-4dac-80fc-e99adaceaeef","Type":"ContainerDied","Data":"5ed227e4ca4a3e995077d26b23f6cd8b22e2f5a40730f7285c04a82c4991073a"} Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.454148 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ed227e4ca4a3e995077d26b23f6cd8b22e2f5a40730f7285c04a82c4991073a" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.481966 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.555304 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities\") pod \"71acc9a2-6244-4dac-80fc-e99adaceaeef\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.555358 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content\") pod \"71acc9a2-6244-4dac-80fc-e99adaceaeef\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.555391 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vlk5\" (UniqueName: \"kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5\") pod \"71acc9a2-6244-4dac-80fc-e99adaceaeef\" (UID: \"71acc9a2-6244-4dac-80fc-e99adaceaeef\") " Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.556519 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities" (OuterVolumeSpecName: "utilities") pod "71acc9a2-6244-4dac-80fc-e99adaceaeef" (UID: "71acc9a2-6244-4dac-80fc-e99adaceaeef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.561344 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5" (OuterVolumeSpecName: "kube-api-access-9vlk5") pod "71acc9a2-6244-4dac-80fc-e99adaceaeef" (UID: "71acc9a2-6244-4dac-80fc-e99adaceaeef"). InnerVolumeSpecName "kube-api-access-9vlk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.659073 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vlk5\" (UniqueName: \"kubernetes.io/projected/71acc9a2-6244-4dac-80fc-e99adaceaeef-kube-api-access-9vlk5\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.659121 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.682837 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71acc9a2-6244-4dac-80fc-e99adaceaeef" (UID: "71acc9a2-6244-4dac-80fc-e99adaceaeef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 09:55:54 crc kubenswrapper[4788]: I1211 09:55:54.761657 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71acc9a2-6244-4dac-80fc-e99adaceaeef-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 09:55:55 crc kubenswrapper[4788]: I1211 09:55:55.473169 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-84vl5" Dec 11 09:55:55 crc kubenswrapper[4788]: I1211 09:55:55.510422 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:55 crc kubenswrapper[4788]: I1211 09:55:55.519669 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-84vl5"] Dec 11 09:55:56 crc kubenswrapper[4788]: I1211 09:55:56.510488 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" path="/var/lib/kubelet/pods/71acc9a2-6244-4dac-80fc-e99adaceaeef/volumes" Dec 11 09:56:09 crc kubenswrapper[4788]: I1211 09:56:09.106758 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hzdhz"] Dec 11 09:56:09 crc kubenswrapper[4788]: I1211 09:56:09.121340 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hzdhz"] Dec 11 09:56:10 crc kubenswrapper[4788]: I1211 09:56:10.510465 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50a543f4-3907-44d1-8dca-7c180fc4dab2" path="/var/lib/kubelet/pods/50a543f4-3907-44d1-8dca-7c180fc4dab2/volumes" Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.369870 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.371543 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.371683 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.372670 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.372831 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8" gracePeriod=600 Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.714817 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8" exitCode=0 Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.714963 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" 
event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8"} Dec 11 09:56:21 crc kubenswrapper[4788]: I1211 09:56:21.715171 4788 scope.go:117] "RemoveContainer" containerID="d386f7beb02fb46ad57f1d807be5eaad6953eba3ceb0429b927e7a70fcbefdd1" Dec 11 09:56:22 crc kubenswrapper[4788]: I1211 09:56:22.725536 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3"} Dec 11 09:56:29 crc kubenswrapper[4788]: I1211 09:56:29.789151 4788 generic.go:334] "Generic (PLEG): container finished" podID="1203ac2a-acfa-4b1a-bba7-97eff5508d35" containerID="4824c9eb080f2cdefd6e35ab42afa69d5cbac9bbacd6888b86d73df380db91c9" exitCode=0 Dec 11 09:56:29 crc kubenswrapper[4788]: I1211 09:56:29.789673 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" event={"ID":"1203ac2a-acfa-4b1a-bba7-97eff5508d35","Type":"ContainerDied","Data":"4824c9eb080f2cdefd6e35ab42afa69d5cbac9bbacd6888b86d73df380db91c9"} Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.311692 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.438781 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key\") pod \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.438902 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld52x\" (UniqueName: \"kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x\") pod \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.438934 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory\") pod \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\" (UID: \"1203ac2a-acfa-4b1a-bba7-97eff5508d35\") " Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.445067 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x" (OuterVolumeSpecName: "kube-api-access-ld52x") pod "1203ac2a-acfa-4b1a-bba7-97eff5508d35" (UID: "1203ac2a-acfa-4b1a-bba7-97eff5508d35"). InnerVolumeSpecName "kube-api-access-ld52x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.470574 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory" (OuterVolumeSpecName: "inventory") pod "1203ac2a-acfa-4b1a-bba7-97eff5508d35" (UID: "1203ac2a-acfa-4b1a-bba7-97eff5508d35"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.472004 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1203ac2a-acfa-4b1a-bba7-97eff5508d35" (UID: "1203ac2a-acfa-4b1a-bba7-97eff5508d35"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.541250 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld52x\" (UniqueName: \"kubernetes.io/projected/1203ac2a-acfa-4b1a-bba7-97eff5508d35-kube-api-access-ld52x\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.541333 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.541349 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1203ac2a-acfa-4b1a-bba7-97eff5508d35-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.810744 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" event={"ID":"1203ac2a-acfa-4b1a-bba7-97eff5508d35","Type":"ContainerDied","Data":"c5fd50d98dd8e89e38c8505451ebadac4c47fd6c3b6de4bebdc82d93fd84fdc4"} Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.811087 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5fd50d98dd8e89e38c8505451ebadac4c47fd6c3b6de4bebdc82d93fd84fdc4" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.810854 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905012 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq"] Dec 11 09:56:31 crc kubenswrapper[4788]: E1211 09:56:31.905462 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="extract-content" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905487 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="extract-content" Dec 11 09:56:31 crc kubenswrapper[4788]: E1211 09:56:31.905516 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1203ac2a-acfa-4b1a-bba7-97eff5508d35" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905525 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1203ac2a-acfa-4b1a-bba7-97eff5508d35" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:31 crc kubenswrapper[4788]: E1211 09:56:31.905540 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="registry-server" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905547 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="registry-server" Dec 11 09:56:31 crc kubenswrapper[4788]: E1211 09:56:31.905591 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="extract-utilities" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905599 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="extract-utilities" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905812 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="71acc9a2-6244-4dac-80fc-e99adaceaeef" containerName="registry-server" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.905840 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1203ac2a-acfa-4b1a-bba7-97eff5508d35" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.908184 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.910352 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.910581 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.910927 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.911181 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:56:31 crc kubenswrapper[4788]: I1211 09:56:31.917255 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq"] Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.052960 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.053302 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqkht\" (UniqueName: \"kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.053345 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.154768 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.154831 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqkht\" (UniqueName: \"kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.154886 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.163896 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.165671 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.177604 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqkht\" (UniqueName: \"kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-g8htq\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.242656 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.786506 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq"] Dec 11 09:56:32 crc kubenswrapper[4788]: I1211 09:56:32.828402 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" event={"ID":"f49b48e9-71fc-4a17-9cf2-c5831649bda2","Type":"ContainerStarted","Data":"c3a65a86c725c5cda4a2b064a8211a73913385efac019ba91ecfa8337570fa7f"} Dec 11 09:56:33 crc kubenswrapper[4788]: I1211 09:56:33.840584 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" event={"ID":"f49b48e9-71fc-4a17-9cf2-c5831649bda2","Type":"ContainerStarted","Data":"5f8e172740082579c7080814fe27f7d322beb8a20148a468b30eae94a8c96aee"} Dec 11 09:56:33 crc kubenswrapper[4788]: I1211 09:56:33.866773 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" podStartSLOduration=2.666591487 podStartE2EDuration="2.866752281s" podCreationTimestamp="2025-12-11 09:56:31 +0000 UTC" firstStartedPulling="2025-12-11 09:56:32.796724174 +0000 UTC m=+2122.867503760" lastFinishedPulling="2025-12-11 09:56:32.996884968 +0000 UTC m=+2123.067664554" observedRunningTime="2025-12-11 09:56:33.862822701 +0000 UTC m=+2123.933602297" watchObservedRunningTime="2025-12-11 09:56:33.866752281 +0000 UTC m=+2123.937531877" Dec 11 09:56:38 crc kubenswrapper[4788]: I1211 09:56:38.895141 4788 generic.go:334] "Generic (PLEG): container finished" podID="f49b48e9-71fc-4a17-9cf2-c5831649bda2" containerID="5f8e172740082579c7080814fe27f7d322beb8a20148a468b30eae94a8c96aee" exitCode=0 Dec 11 09:56:38 crc kubenswrapper[4788]: I1211 
09:56:38.895373 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" event={"ID":"f49b48e9-71fc-4a17-9cf2-c5831649bda2","Type":"ContainerDied","Data":"5f8e172740082579c7080814fe27f7d322beb8a20148a468b30eae94a8c96aee"} Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.064844 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-wx8sg"] Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.076793 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-wx8sg"] Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.395737 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.511534 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e204962-9911-4a7c-b2da-b4a614f548a6" path="/var/lib/kubelet/pods/2e204962-9911-4a7c-b2da-b4a614f548a6/volumes" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.536561 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqkht\" (UniqueName: \"kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht\") pod \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.536873 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key\") pod \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.536987 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory\") pod \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\" (UID: \"f49b48e9-71fc-4a17-9cf2-c5831649bda2\") " Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.546018 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht" (OuterVolumeSpecName: "kube-api-access-wqkht") pod "f49b48e9-71fc-4a17-9cf2-c5831649bda2" (UID: "f49b48e9-71fc-4a17-9cf2-c5831649bda2"). InnerVolumeSpecName "kube-api-access-wqkht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.573144 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f49b48e9-71fc-4a17-9cf2-c5831649bda2" (UID: "f49b48e9-71fc-4a17-9cf2-c5831649bda2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.573553 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory" (OuterVolumeSpecName: "inventory") pod "f49b48e9-71fc-4a17-9cf2-c5831649bda2" (UID: "f49b48e9-71fc-4a17-9cf2-c5831649bda2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.640549 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqkht\" (UniqueName: \"kubernetes.io/projected/f49b48e9-71fc-4a17-9cf2-c5831649bda2-kube-api-access-wqkht\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.640588 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.640599 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f49b48e9-71fc-4a17-9cf2-c5831649bda2-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.917804 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" event={"ID":"f49b48e9-71fc-4a17-9cf2-c5831649bda2","Type":"ContainerDied","Data":"c3a65a86c725c5cda4a2b064a8211a73913385efac019ba91ecfa8337570fa7f"} Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.917881 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-g8htq" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.917883 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3a65a86c725c5cda4a2b064a8211a73913385efac019ba91ecfa8337570fa7f" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.981185 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r"] Dec 11 09:56:40 crc kubenswrapper[4788]: E1211 09:56:40.981638 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f49b48e9-71fc-4a17-9cf2-c5831649bda2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.981656 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f49b48e9-71fc-4a17-9cf2-c5831649bda2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.981887 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f49b48e9-71fc-4a17-9cf2-c5831649bda2" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 11 09:56:40 crc kubenswrapper[4788]: I1211 09:56:40.982600 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:40.994100 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r"] Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.039103 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.039169 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.039427 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.039484 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.064968 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.065440 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.065732 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thvkq\" (UniqueName: \"kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.168624 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.168798 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thvkq\" (UniqueName: \"kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.168837 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: 
\"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.173027 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.173403 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.190098 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thvkq\" (UniqueName: \"kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-fkd9r\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.370147 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:56:41 crc kubenswrapper[4788]: W1211 09:56:41.911995 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb8e800_98d6_4c2a_be3a_773e70a3dbff.slice/crio-c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45 WatchSource:0}: Error finding container c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45: Status 404 returned error can't find the container with id c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45 Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.912076 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r"] Dec 11 09:56:41 crc kubenswrapper[4788]: I1211 09:56:41.929321 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" event={"ID":"fdb8e800-98d6-4c2a-be3a-773e70a3dbff","Type":"ContainerStarted","Data":"c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45"} Dec 11 09:56:42 crc kubenswrapper[4788]: I1211 09:56:42.940580 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" event={"ID":"fdb8e800-98d6-4c2a-be3a-773e70a3dbff","Type":"ContainerStarted","Data":"f96d6ec3ef87e3108248818d069cf6af6b291e457c583a7cf628037422125211"} Dec 11 09:56:42 crc kubenswrapper[4788]: I1211 09:56:42.964641 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" podStartSLOduration=2.775401419 podStartE2EDuration="2.964622714s" podCreationTimestamp="2025-12-11 09:56:40 +0000 UTC" firstStartedPulling="2025-12-11 09:56:41.915257044 +0000 UTC m=+2131.986036630" lastFinishedPulling="2025-12-11 09:56:42.104478339 +0000 UTC m=+2132.175257925" observedRunningTime="2025-12-11 09:56:42.96092416 +0000 UTC 
m=+2133.031703736" watchObservedRunningTime="2025-12-11 09:56:42.964622714 +0000 UTC m=+2133.035402300" Dec 11 09:56:46 crc kubenswrapper[4788]: I1211 09:56:46.143188 4788 scope.go:117] "RemoveContainer" containerID="b16be0b3a5f035eebf33ca393d64fc4c5a56726c361a09800819d27f44825651" Dec 11 09:56:46 crc kubenswrapper[4788]: I1211 09:56:46.209753 4788 scope.go:117] "RemoveContainer" containerID="140fe4839241daf0c592246ae12ef1c55791f6e7c12445313a4b142c69fa8a0d" Dec 11 09:56:59 crc kubenswrapper[4788]: I1211 09:56:59.049799 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jgd9g"] Dec 11 09:56:59 crc kubenswrapper[4788]: I1211 09:56:59.061285 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jgd9g"] Dec 11 09:57:00 crc kubenswrapper[4788]: I1211 09:57:00.505258 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef23e667-905e-41dd-a0ac-9739133fbbf9" path="/var/lib/kubelet/pods/ef23e667-905e-41dd-a0ac-9739133fbbf9/volumes" Dec 11 09:57:22 crc kubenswrapper[4788]: I1211 09:57:22.326670 4788 generic.go:334] "Generic (PLEG): container finished" podID="fdb8e800-98d6-4c2a-be3a-773e70a3dbff" containerID="f96d6ec3ef87e3108248818d069cf6af6b291e457c583a7cf628037422125211" exitCode=0 Dec 11 09:57:22 crc kubenswrapper[4788]: I1211 09:57:22.326775 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" event={"ID":"fdb8e800-98d6-4c2a-be3a-773e70a3dbff","Type":"ContainerDied","Data":"f96d6ec3ef87e3108248818d069cf6af6b291e457c583a7cf628037422125211"} Dec 11 09:57:23 crc kubenswrapper[4788]: I1211 09:57:23.863082 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:57:23 crc kubenswrapper[4788]: I1211 09:57:23.915902 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key\") pod \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " Dec 11 09:57:23 crc kubenswrapper[4788]: I1211 09:57:23.960647 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fdb8e800-98d6-4c2a-be3a-773e70a3dbff" (UID: "fdb8e800-98d6-4c2a-be3a-773e70a3dbff"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.020128 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thvkq\" (UniqueName: \"kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq\") pod \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.020305 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory\") pod \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\" (UID: \"fdb8e800-98d6-4c2a-be3a-773e70a3dbff\") " Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.020861 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.024168 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq" (OuterVolumeSpecName: "kube-api-access-thvkq") pod "fdb8e800-98d6-4c2a-be3a-773e70a3dbff" (UID: "fdb8e800-98d6-4c2a-be3a-773e70a3dbff"). InnerVolumeSpecName "kube-api-access-thvkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.051436 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory" (OuterVolumeSpecName: "inventory") pod "fdb8e800-98d6-4c2a-be3a-773e70a3dbff" (UID: "fdb8e800-98d6-4c2a-be3a-773e70a3dbff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.124524 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thvkq\" (UniqueName: \"kubernetes.io/projected/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-kube-api-access-thvkq\") on node \"crc\" DevicePath \"\"" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.124593 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdb8e800-98d6-4c2a-be3a-773e70a3dbff-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.383658 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" event={"ID":"fdb8e800-98d6-4c2a-be3a-773e70a3dbff","Type":"ContainerDied","Data":"c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45"} Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.383725 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.383825 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-fkd9r" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.430174 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg"] Dec 11 09:57:24 crc kubenswrapper[4788]: E1211 09:57:24.430711 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb8e800-98d6-4c2a-be3a-773e70a3dbff" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.430736 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb8e800-98d6-4c2a-be3a-773e70a3dbff" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.431067 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb8e800-98d6-4c2a-be3a-773e70a3dbff" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.431988 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.436935 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.437537 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.437614 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.437826 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.447936 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg"] Dec 11 09:57:24 crc kubenswrapper[4788]: E1211 09:57:24.530279 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb8e800_98d6_4c2a_be3a_773e70a3dbff.slice/crio-c707f89acac493dd1a3579d2c9848d104abe5ebc557c3eaf80048a4acb417c45\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb8e800_98d6_4c2a_be3a_773e70a3dbff.slice\": RecentStats: unable to find data in memory cache]" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.534516 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xvkj\" (UniqueName: \"kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.534586 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.534649 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.636104 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xvkj\" (UniqueName: \"kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.636162 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.636207 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.643428 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.643787 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.657294 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xvkj\" (UniqueName: \"kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:24 crc kubenswrapper[4788]: I1211 09:57:24.681741 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:57:25 crc kubenswrapper[4788]: I1211 09:57:25.262568 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg"] Dec 11 09:57:25 crc kubenswrapper[4788]: I1211 09:57:25.393849 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" event={"ID":"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d","Type":"ContainerStarted","Data":"497615e05382757ee7bbc9d29dc2e8ab7996b5c921d7b2062607fbaa9e7205e1"} Dec 11 09:57:26 crc kubenswrapper[4788]: I1211 09:57:26.048200 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-4664m"] Dec 11 09:57:26 crc kubenswrapper[4788]: I1211 09:57:26.057842 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-4664m"] Dec 11 09:57:26 crc kubenswrapper[4788]: I1211 09:57:26.403849 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" event={"ID":"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d","Type":"ContainerStarted","Data":"071cd8ea701b747ba6423ff8a30ef4e6dd056c53980381f2080b379abb7e89c8"} Dec 11 09:57:26 crc kubenswrapper[4788]: I1211 09:57:26.440418 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" podStartSLOduration=2.276417409 podStartE2EDuration="2.440395094s" podCreationTimestamp="2025-12-11 09:57:24 +0000 UTC" firstStartedPulling="2025-12-11 09:57:25.278356762 +0000 UTC m=+2175.349136348" lastFinishedPulling="2025-12-11 09:57:25.442334447 +0000 UTC m=+2175.513114033" observedRunningTime="2025-12-11 09:57:26.434386292 +0000 UTC m=+2176.505165878" watchObservedRunningTime="2025-12-11 09:57:26.440395094 +0000 UTC m=+2176.511174670" Dec 11 09:57:26 crc kubenswrapper[4788]: I1211 09:57:26.507325 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1098e947-9b9a-4eeb-8ecd-1c0e1253adcc" path="/var/lib/kubelet/pods/1098e947-9b9a-4eeb-8ecd-1c0e1253adcc/volumes" Dec 11 09:57:46 crc kubenswrapper[4788]: I1211 09:57:46.320242 4788 scope.go:117] "RemoveContainer" containerID="3798fe725fc33adf8133e9ae4262c73f9178e094151000712e63f33c72d1add7" Dec 11 09:57:46 crc kubenswrapper[4788]: I1211 09:57:46.370011 4788 scope.go:117] "RemoveContainer" containerID="043f5585e5f2c06c04bcd6ac5c529b2f95bc9545bc4b9683ceb450ec444d1dea" Dec 11 09:58:16 crc kubenswrapper[4788]: I1211 09:58:16.866914 4788 generic.go:334] "Generic (PLEG): container finished" podID="7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" containerID="071cd8ea701b747ba6423ff8a30ef4e6dd056c53980381f2080b379abb7e89c8" exitCode=0 Dec 11 09:58:16 crc kubenswrapper[4788]: I1211 09:58:16.867035 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" event={"ID":"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d","Type":"ContainerDied","Data":"071cd8ea701b747ba6423ff8a30ef4e6dd056c53980381f2080b379abb7e89c8"} Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.460932 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.509836 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key\") pod \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.509978 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory\") pod \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.510158 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xvkj\" (UniqueName: \"kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj\") pod \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\" (UID: \"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d\") " Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.519721 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj" (OuterVolumeSpecName: "kube-api-access-9xvkj") pod "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" (UID: "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d"). InnerVolumeSpecName "kube-api-access-9xvkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.545711 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory" (OuterVolumeSpecName: "inventory") pod "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" (UID: "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.546301 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" (UID: "7eb3182b-8f0a-4c94-b59b-4d631cd5f52d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.615070 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xvkj\" (UniqueName: \"kubernetes.io/projected/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-kube-api-access-9xvkj\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.615124 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.615138 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb3182b-8f0a-4c94-b59b-4d631cd5f52d-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.885872 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" event={"ID":"7eb3182b-8f0a-4c94-b59b-4d631cd5f52d","Type":"ContainerDied","Data":"497615e05382757ee7bbc9d29dc2e8ab7996b5c921d7b2062607fbaa9e7205e1"} Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.885917 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="497615e05382757ee7bbc9d29dc2e8ab7996b5c921d7b2062607fbaa9e7205e1" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.886305 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.980477 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w29p9"] Dec 11 09:58:18 crc kubenswrapper[4788]: E1211 09:58:18.981021 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.981043 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.981353 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb3182b-8f0a-4c94-b59b-4d631cd5f52d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.982155 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.985310 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.985566 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.985832 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.986954 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:58:18 crc kubenswrapper[4788]: I1211 09:58:18.995631 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w29p9"] Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.023636 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rvm4\" (UniqueName: \"kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.023918 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.024115 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.128090 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rvm4\" (UniqueName: \"kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.128863 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.129540 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc 
kubenswrapper[4788]: I1211 09:58:19.133192 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.142757 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.146388 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rvm4\" (UniqueName: \"kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4\") pod \"ssh-known-hosts-edpm-deployment-w29p9\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.304035 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.858153 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-w29p9"] Dec 11 09:58:19 crc kubenswrapper[4788]: I1211 09:58:19.899614 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" event={"ID":"bb7a8155-00dc-4dc1-9fc3-92417a6264ff","Type":"ContainerStarted","Data":"ce5eb6f6f36edc920ab06ae1c60bd767d5d61e6008c527e1d56face522af676d"} Dec 11 09:58:20 crc kubenswrapper[4788]: I1211 09:58:20.927307 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" event={"ID":"bb7a8155-00dc-4dc1-9fc3-92417a6264ff","Type":"ContainerStarted","Data":"c4481fb481e8d07004c93f520758b13d26b67c34fe269588cf045583e2d03e08"} Dec 11 09:58:20 crc kubenswrapper[4788]: I1211 09:58:20.957816 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" podStartSLOduration=2.735459161 podStartE2EDuration="2.957791217s" podCreationTimestamp="2025-12-11 09:58:18 +0000 UTC" firstStartedPulling="2025-12-11 09:58:19.863120865 +0000 UTC m=+2229.933900451" lastFinishedPulling="2025-12-11 09:58:20.085452921 +0000 UTC m=+2230.156232507" observedRunningTime="2025-12-11 09:58:20.949311312 +0000 UTC m=+2231.020090918" watchObservedRunningTime="2025-12-11 09:58:20.957791217 +0000 UTC m=+2231.028570803" Dec 11 09:58:21 crc kubenswrapper[4788]: I1211 09:58:21.369371 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:58:21 crc kubenswrapper[4788]: I1211 09:58:21.369468 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:58:28 crc kubenswrapper[4788]: I1211 09:58:28.018089 4788 generic.go:334] "Generic (PLEG): container finished" podID="bb7a8155-00dc-4dc1-9fc3-92417a6264ff" containerID="c4481fb481e8d07004c93f520758b13d26b67c34fe269588cf045583e2d03e08" exitCode=0 Dec 11 09:58:28 crc kubenswrapper[4788]: I1211 09:58:28.018259 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" event={"ID":"bb7a8155-00dc-4dc1-9fc3-92417a6264ff","Type":"ContainerDied","Data":"c4481fb481e8d07004c93f520758b13d26b67c34fe269588cf045583e2d03e08"} Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.447326 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.468682 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0\") pod \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.468760 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rvm4\" (UniqueName: \"kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4\") pod \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.468828 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam\") pod \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\" (UID: \"bb7a8155-00dc-4dc1-9fc3-92417a6264ff\") " Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.476853 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4" (OuterVolumeSpecName: "kube-api-access-5rvm4") pod "bb7a8155-00dc-4dc1-9fc3-92417a6264ff" (UID: "bb7a8155-00dc-4dc1-9fc3-92417a6264ff"). InnerVolumeSpecName "kube-api-access-5rvm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.507155 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "bb7a8155-00dc-4dc1-9fc3-92417a6264ff" (UID: "bb7a8155-00dc-4dc1-9fc3-92417a6264ff"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.531853 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "bb7a8155-00dc-4dc1-9fc3-92417a6264ff" (UID: "bb7a8155-00dc-4dc1-9fc3-92417a6264ff"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.571933 4788 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.571990 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rvm4\" (UniqueName: \"kubernetes.io/projected/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-kube-api-access-5rvm4\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:29 crc kubenswrapper[4788]: I1211 09:58:29.572006 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/bb7a8155-00dc-4dc1-9fc3-92417a6264ff-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.037890 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" event={"ID":"bb7a8155-00dc-4dc1-9fc3-92417a6264ff","Type":"ContainerDied","Data":"ce5eb6f6f36edc920ab06ae1c60bd767d5d61e6008c527e1d56face522af676d"} Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.038343 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce5eb6f6f36edc920ab06ae1c60bd767d5d61e6008c527e1d56face522af676d" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.038163 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-w29p9" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.140591 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4"] Dec 11 09:58:30 crc kubenswrapper[4788]: E1211 09:58:30.142716 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7a8155-00dc-4dc1-9fc3-92417a6264ff" containerName="ssh-known-hosts-edpm-deployment" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.142756 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7a8155-00dc-4dc1-9fc3-92417a6264ff" containerName="ssh-known-hosts-edpm-deployment" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.143292 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb7a8155-00dc-4dc1-9fc3-92417a6264ff" containerName="ssh-known-hosts-edpm-deployment" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.144439 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.149520 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.149709 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.149563 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.149986 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.159027 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4"] Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.184001 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-599kp\" (UniqueName: \"kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.184800 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.184971 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.289096 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.289531 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.289854 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-599kp\" (UniqueName: \"kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.294533 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.298693 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.316846 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-599kp\" (UniqueName: \"kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-qwvh4\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:30 crc kubenswrapper[4788]: I1211 09:58:30.475539 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:31 crc kubenswrapper[4788]: I1211 09:58:31.061060 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4"] Dec 11 09:58:32 crc kubenswrapper[4788]: I1211 09:58:32.062185 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" event={"ID":"5459ff9c-bd04-47d1-ade7-e52983b8fc86","Type":"ContainerStarted","Data":"2e75af550098b31c6258c97119c22368788b4bd15d795bf367f597653df24f3d"} Dec 11 09:58:32 crc kubenswrapper[4788]: I1211 09:58:32.062853 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" event={"ID":"5459ff9c-bd04-47d1-ade7-e52983b8fc86","Type":"ContainerStarted","Data":"761c52ea294ed3c6199dc0ebef2e05f46ab10dca8fd3a1f6fb9cd71f1739aade"} Dec 11 09:58:32 crc kubenswrapper[4788]: I1211 09:58:32.094334 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" podStartSLOduration=1.882090458 podStartE2EDuration="2.094312678s" podCreationTimestamp="2025-12-11 09:58:30 +0000 UTC" firstStartedPulling="2025-12-11 09:58:31.067655113 +0000 UTC m=+2241.138434709" lastFinishedPulling="2025-12-11 09:58:31.279877343 +0000 UTC m=+2241.350656929" observedRunningTime="2025-12-11 09:58:32.085896084 +0000 UTC m=+2242.156675670" watchObservedRunningTime="2025-12-11 09:58:32.094312678 +0000 UTC m=+2242.165092264" Dec 11 09:58:40 crc kubenswrapper[4788]: I1211 09:58:40.161257 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" event={"ID":"5459ff9c-bd04-47d1-ade7-e52983b8fc86","Type":"ContainerDied","Data":"2e75af550098b31c6258c97119c22368788b4bd15d795bf367f597653df24f3d"} Dec 11 09:58:40 crc kubenswrapper[4788]: I1211 09:58:40.161140 4788 generic.go:334] "Generic (PLEG): container finished" podID="5459ff9c-bd04-47d1-ade7-e52983b8fc86" 
containerID="2e75af550098b31c6258c97119c22368788b4bd15d795bf367f597653df24f3d" exitCode=0 Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.587260 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.657698 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-599kp\" (UniqueName: \"kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp\") pod \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.657947 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory\") pod \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.658104 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key\") pod \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\" (UID: \"5459ff9c-bd04-47d1-ade7-e52983b8fc86\") " Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.665556 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp" (OuterVolumeSpecName: "kube-api-access-599kp") pod "5459ff9c-bd04-47d1-ade7-e52983b8fc86" (UID: "5459ff9c-bd04-47d1-ade7-e52983b8fc86"). InnerVolumeSpecName "kube-api-access-599kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.688344 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5459ff9c-bd04-47d1-ade7-e52983b8fc86" (UID: "5459ff9c-bd04-47d1-ade7-e52983b8fc86"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.693253 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory" (OuterVolumeSpecName: "inventory") pod "5459ff9c-bd04-47d1-ade7-e52983b8fc86" (UID: "5459ff9c-bd04-47d1-ade7-e52983b8fc86"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.760350 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.760640 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-599kp\" (UniqueName: \"kubernetes.io/projected/5459ff9c-bd04-47d1-ade7-e52983b8fc86-kube-api-access-599kp\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:41 crc kubenswrapper[4788]: I1211 09:58:41.760656 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5459ff9c-bd04-47d1-ade7-e52983b8fc86-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.182289 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" event={"ID":"5459ff9c-bd04-47d1-ade7-e52983b8fc86","Type":"ContainerDied","Data":"761c52ea294ed3c6199dc0ebef2e05f46ab10dca8fd3a1f6fb9cd71f1739aade"} Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.182348 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="761c52ea294ed3c6199dc0ebef2e05f46ab10dca8fd3a1f6fb9cd71f1739aade" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.182376 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-qwvh4" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.254587 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm"] Dec 11 09:58:42 crc kubenswrapper[4788]: E1211 09:58:42.255212 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5459ff9c-bd04-47d1-ade7-e52983b8fc86" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.255244 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5459ff9c-bd04-47d1-ade7-e52983b8fc86" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.255477 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5459ff9c-bd04-47d1-ade7-e52983b8fc86" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.256810 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.260655 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.261317 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.262564 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.265836 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.278418 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm"] Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.389892 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbt58\" (UniqueName: \"kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.390066 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.390102 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.492970 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.493041 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.493131 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbt58\" (UniqueName: \"kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: 
\"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.498479 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.499990 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.510249 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbt58\" (UniqueName: \"kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:42 crc kubenswrapper[4788]: I1211 09:58:42.585714 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:43 crc kubenswrapper[4788]: I1211 09:58:43.177307 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm"] Dec 11 09:58:44 crc kubenswrapper[4788]: I1211 09:58:44.224600 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" event={"ID":"ecf372fc-dfed-418f-904f-8a2e485acbcd","Type":"ContainerStarted","Data":"8fdde83f4e872bf9f5a06043536544a39f706bd536d641949c6b5db27cbf6801"} Dec 11 09:58:44 crc kubenswrapper[4788]: I1211 09:58:44.225551 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" event={"ID":"ecf372fc-dfed-418f-904f-8a2e485acbcd","Type":"ContainerStarted","Data":"83d29c9f9ac9a6d86bf9ec3f465a922e1e888315d2a45e15fc228179254776a6"} Dec 11 09:58:44 crc kubenswrapper[4788]: I1211 09:58:44.248091 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" podStartSLOduration=2.037630069 podStartE2EDuration="2.248069433s" podCreationTimestamp="2025-12-11 09:58:42 +0000 UTC" firstStartedPulling="2025-12-11 09:58:43.18952518 +0000 UTC m=+2253.260304766" lastFinishedPulling="2025-12-11 09:58:43.399964554 +0000 UTC m=+2253.470744130" observedRunningTime="2025-12-11 09:58:44.244341099 +0000 UTC m=+2254.315120685" watchObservedRunningTime="2025-12-11 09:58:44.248069433 +0000 UTC m=+2254.318849029" Dec 11 09:58:51 crc kubenswrapper[4788]: I1211 09:58:51.369475 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:58:51 crc kubenswrapper[4788]: I1211 09:58:51.370200 4788 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:58:54 crc kubenswrapper[4788]: I1211 09:58:54.321955 4788 generic.go:334] "Generic (PLEG): container finished" podID="ecf372fc-dfed-418f-904f-8a2e485acbcd" containerID="8fdde83f4e872bf9f5a06043536544a39f706bd536d641949c6b5db27cbf6801" exitCode=0 Dec 11 09:58:54 crc kubenswrapper[4788]: I1211 09:58:54.322130 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" event={"ID":"ecf372fc-dfed-418f-904f-8a2e485acbcd","Type":"ContainerDied","Data":"8fdde83f4e872bf9f5a06043536544a39f706bd536d641949c6b5db27cbf6801"} Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.785697 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.800010 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key\") pod \"ecf372fc-dfed-418f-904f-8a2e485acbcd\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.800244 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory\") pod \"ecf372fc-dfed-418f-904f-8a2e485acbcd\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.800442 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbt58\" (UniqueName: \"kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58\") pod \"ecf372fc-dfed-418f-904f-8a2e485acbcd\" (UID: \"ecf372fc-dfed-418f-904f-8a2e485acbcd\") " Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.811691 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58" (OuterVolumeSpecName: "kube-api-access-qbt58") pod "ecf372fc-dfed-418f-904f-8a2e485acbcd" (UID: "ecf372fc-dfed-418f-904f-8a2e485acbcd"). InnerVolumeSpecName "kube-api-access-qbt58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.835199 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory" (OuterVolumeSpecName: "inventory") pod "ecf372fc-dfed-418f-904f-8a2e485acbcd" (UID: "ecf372fc-dfed-418f-904f-8a2e485acbcd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.837816 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ecf372fc-dfed-418f-904f-8a2e485acbcd" (UID: "ecf372fc-dfed-418f-904f-8a2e485acbcd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.904042 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.904108 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbt58\" (UniqueName: \"kubernetes.io/projected/ecf372fc-dfed-418f-904f-8a2e485acbcd-kube-api-access-qbt58\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:55 crc kubenswrapper[4788]: I1211 09:58:55.904124 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ecf372fc-dfed-418f-904f-8a2e485acbcd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.345167 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" event={"ID":"ecf372fc-dfed-418f-904f-8a2e485acbcd","Type":"ContainerDied","Data":"83d29c9f9ac9a6d86bf9ec3f465a922e1e888315d2a45e15fc228179254776a6"} Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.345649 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83d29c9f9ac9a6d86bf9ec3f465a922e1e888315d2a45e15fc228179254776a6" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.345215 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.451934 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8"] Dec 11 09:58:56 crc kubenswrapper[4788]: E1211 09:58:56.452521 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf372fc-dfed-418f-904f-8a2e485acbcd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.452550 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf372fc-dfed-418f-904f-8a2e485acbcd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.452793 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecf372fc-dfed-418f-904f-8a2e485acbcd" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.453701 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.457061 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.458817 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.459138 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.460543 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.460572 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.460918 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.461895 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.476630 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.478462 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8"] Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.517844 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.517935 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.517967 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518012 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518070 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518209 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518260 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518291 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sk78\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518497 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518543 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518627 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: 
\"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518710 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518753 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.518859 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621348 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621431 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621459 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sk78\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621503 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621537 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621580 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621624 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621661 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621724 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621793 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621821 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621857 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621898 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.621935 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.627751 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.628006 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.628533 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.628985 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.629991 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.630100 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.630304 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.631239 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.631420 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.633186 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.633951 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.638752 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.639548 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: 
\"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.647278 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sk78\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-cclp8\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:56 crc kubenswrapper[4788]: I1211 09:58:56.774636 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:58:57 crc kubenswrapper[4788]: I1211 09:58:57.348729 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8"] Dec 11 09:58:58 crc kubenswrapper[4788]: I1211 09:58:58.373148 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" event={"ID":"ab20916c-42ba-431e-af33-cf55f453378e","Type":"ContainerStarted","Data":"9c0797060d17e01831d57daca5708e70cfe87a0473b0220a49538eb40f295493"} Dec 11 09:58:58 crc kubenswrapper[4788]: I1211 09:58:58.374948 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" event={"ID":"ab20916c-42ba-431e-af33-cf55f453378e","Type":"ContainerStarted","Data":"b36d54ca4af305d01efea0f4a4d1270cd0a9861e3f35643e9a65f8a4ea199f88"} Dec 11 09:58:58 crc kubenswrapper[4788]: I1211 09:58:58.409067 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" podStartSLOduration=2.03307377 podStartE2EDuration="2.40903638s" podCreationTimestamp="2025-12-11 09:58:56 +0000 UTC" firstStartedPulling="2025-12-11 09:58:57.357891279 +0000 UTC m=+2267.428670875" lastFinishedPulling="2025-12-11 09:58:57.733853899 +0000 UTC m=+2267.804633485" observedRunningTime="2025-12-11 09:58:58.403345886 +0000 UTC m=+2268.474125482" watchObservedRunningTime="2025-12-11 09:58:58.40903638 +0000 UTC m=+2268.479815966" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.369845 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.370444 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.370493 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.371406 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3"} 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.371466 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" gracePeriod=600 Dec 11 09:59:21 crc kubenswrapper[4788]: E1211 09:59:21.494714 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.635310 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" exitCode=0 Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.635370 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3"} Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.635419 4788 scope.go:117] "RemoveContainer" containerID="bbbc1e80b9d3d5c0ee1a3b47244845f7033d53a0a229c5bae77d253cde3079f8" Dec 11 09:59:21 crc kubenswrapper[4788]: I1211 09:59:21.636345 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 09:59:21 crc kubenswrapper[4788]: E1211 09:59:21.636679 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:59:35 crc kubenswrapper[4788]: I1211 09:59:35.495690 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 09:59:35 crc kubenswrapper[4788]: E1211 09:59:35.496504 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 09:59:37 crc kubenswrapper[4788]: I1211 09:59:37.956625 4788 generic.go:334] "Generic (PLEG): container finished" podID="ab20916c-42ba-431e-af33-cf55f453378e" containerID="9c0797060d17e01831d57daca5708e70cfe87a0473b0220a49538eb40f295493" exitCode=0 Dec 11 09:59:37 crc kubenswrapper[4788]: I1211 09:59:37.956727 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" event={"ID":"ab20916c-42ba-431e-af33-cf55f453378e","Type":"ContainerDied","Data":"9c0797060d17e01831d57daca5708e70cfe87a0473b0220a49538eb40f295493"} Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.535286 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597266 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597360 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597391 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597433 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sk78\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597456 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597486 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597553 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597615 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597649 4788 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597688 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597736 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597763 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597792 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.597824 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"ab20916c-42ba-431e-af33-cf55f453378e\" (UID: \"ab20916c-42ba-431e-af33-cf55f453378e\") " Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.607163 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.607465 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.607987 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.608704 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.610639 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.611895 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.611997 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.612040 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.617648 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.617714 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.617685 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.617847 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78" (OuterVolumeSpecName: "kube-api-access-8sk78") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "kube-api-access-8sk78". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.640886 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.644810 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory" (OuterVolumeSpecName: "inventory") pod "ab20916c-42ba-431e-af33-cf55f453378e" (UID: "ab20916c-42ba-431e-af33-cf55f453378e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701115 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701179 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701195 4788 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701209 4788 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701242 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701259 4788 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701272 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701284 4788 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701296 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sk78\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-kube-api-access-8sk78\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701307 4788 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701321 4788 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701334 4788 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701346 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab20916c-42ba-431e-af33-cf55f453378e-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.701358 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ab20916c-42ba-431e-af33-cf55f453378e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.977525 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" event={"ID":"ab20916c-42ba-431e-af33-cf55f453378e","Type":"ContainerDied","Data":"b36d54ca4af305d01efea0f4a4d1270cd0a9861e3f35643e9a65f8a4ea199f88"} Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.977580 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-cclp8" Dec 11 09:59:39 crc kubenswrapper[4788]: I1211 09:59:39.977584 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b36d54ca4af305d01efea0f4a4d1270cd0a9861e3f35643e9a65f8a4ea199f88" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.077198 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs"] Dec 11 09:59:40 crc kubenswrapper[4788]: E1211 09:59:40.077697 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab20916c-42ba-431e-af33-cf55f453378e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.077717 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab20916c-42ba-431e-af33-cf55f453378e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.077948 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab20916c-42ba-431e-af33-cf55f453378e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.078639 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.081458 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.081471 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.082035 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.088286 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.088578 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.089379 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs"] Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.215058 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.215483 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.215728 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.215854 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr8bm\" (UniqueName: \"kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.216064 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.318242 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.318318 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.318369 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.318457 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.318484 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr8bm\" (UniqueName: \"kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.320350 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.323633 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.327301 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.333958 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.338925 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr8bm\" (UniqueName: \"kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-wh8qs\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.398093 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.936631 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs"] Dec 11 09:59:40 crc kubenswrapper[4788]: I1211 09:59:40.986993 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" event={"ID":"6de52d95-9e8d-402b-9fd3-3431a58a61e8","Type":"ContainerStarted","Data":"7a723b37b3d21fec45febdb3392d1ce177680138ed434a4e5009f828cb8243da"} Dec 11 09:59:44 crc kubenswrapper[4788]: I1211 09:59:44.013094 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" event={"ID":"6de52d95-9e8d-402b-9fd3-3431a58a61e8","Type":"ContainerStarted","Data":"38a65831cd07cbef06aa85114ac1bacddd3d1679e9a97bc960c6aae53608e02d"} Dec 11 09:59:45 crc kubenswrapper[4788]: I1211 09:59:45.039917 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" podStartSLOduration=3.433383972 podStartE2EDuration="5.039897425s" podCreationTimestamp="2025-12-11 09:59:40 +0000 UTC" firstStartedPulling="2025-12-11 09:59:40.943599658 +0000 UTC m=+2311.014379244" lastFinishedPulling="2025-12-11 09:59:42.550113111 +0000 UTC m=+2312.620892697" observedRunningTime="2025-12-11 09:59:45.039108615 +0000 UTC m=+2315.109888201" watchObservedRunningTime="2025-12-11 09:59:45.039897425 +0000 UTC m=+2315.110677001" Dec 11 09:59:49 crc kubenswrapper[4788]: I1211 09:59:49.496149 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 09:59:49 crc kubenswrapper[4788]: E1211 09:59:49.496950 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.153075 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k"] Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.155536 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.159218 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.159258 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.168333 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k"] Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.260626 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.260690 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp2jl\" (UniqueName: \"kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.260824 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.362838 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.362890 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp2jl\" (UniqueName: \"kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.362968 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.364070 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume\") pod 
\"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.370619 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.383037 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp2jl\" (UniqueName: \"kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl\") pod \"collect-profiles-29424120-mw57k\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.491498 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:00 crc kubenswrapper[4788]: I1211 10:00:00.970423 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k"] Dec 11 10:00:01 crc kubenswrapper[4788]: I1211 10:00:01.199970 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" event={"ID":"30ed2403-2cfc-4121-a582-429b1c3443e4","Type":"ContainerStarted","Data":"80b366b4f3238d3c0785fe45f1bd1eb04969d8b6c2425dc023b9b1487ae304ad"} Dec 11 10:00:01 crc kubenswrapper[4788]: I1211 10:00:01.495729 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:00:01 crc kubenswrapper[4788]: E1211 10:00:01.496118 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:02 crc kubenswrapper[4788]: I1211 10:00:02.222527 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" event={"ID":"30ed2403-2cfc-4121-a582-429b1c3443e4","Type":"ContainerStarted","Data":"d28e374c8a0a97bef6392267803537c64cda65d12e55519ff776af414dbc9c6a"} Dec 11 10:00:02 crc kubenswrapper[4788]: I1211 10:00:02.249505 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" podStartSLOduration=2.2494660619999998 podStartE2EDuration="2.249466062s" podCreationTimestamp="2025-12-11 10:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:00:02.243082341 +0000 UTC m=+2332.313861947" watchObservedRunningTime="2025-12-11 10:00:02.249466062 +0000 UTC m=+2332.320245648" Dec 11 10:00:03 crc kubenswrapper[4788]: I1211 10:00:03.236142 4788 generic.go:334] "Generic (PLEG): container finished" podID="30ed2403-2cfc-4121-a582-429b1c3443e4" 
containerID="d28e374c8a0a97bef6392267803537c64cda65d12e55519ff776af414dbc9c6a" exitCode=0 Dec 11 10:00:03 crc kubenswrapper[4788]: I1211 10:00:03.236197 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" event={"ID":"30ed2403-2cfc-4121-a582-429b1c3443e4","Type":"ContainerDied","Data":"d28e374c8a0a97bef6392267803537c64cda65d12e55519ff776af414dbc9c6a"} Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.622213 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.778362 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume\") pod \"30ed2403-2cfc-4121-a582-429b1c3443e4\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.778516 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp2jl\" (UniqueName: \"kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl\") pod \"30ed2403-2cfc-4121-a582-429b1c3443e4\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.778556 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume\") pod \"30ed2403-2cfc-4121-a582-429b1c3443e4\" (UID: \"30ed2403-2cfc-4121-a582-429b1c3443e4\") " Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.779484 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume" (OuterVolumeSpecName: "config-volume") pod "30ed2403-2cfc-4121-a582-429b1c3443e4" (UID: "30ed2403-2cfc-4121-a582-429b1c3443e4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.793358 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "30ed2403-2cfc-4121-a582-429b1c3443e4" (UID: "30ed2403-2cfc-4121-a582-429b1c3443e4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.793363 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl" (OuterVolumeSpecName: "kube-api-access-vp2jl") pod "30ed2403-2cfc-4121-a582-429b1c3443e4" (UID: "30ed2403-2cfc-4121-a582-429b1c3443e4"). InnerVolumeSpecName "kube-api-access-vp2jl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.881139 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/30ed2403-2cfc-4121-a582-429b1c3443e4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.881185 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp2jl\" (UniqueName: \"kubernetes.io/projected/30ed2403-2cfc-4121-a582-429b1c3443e4-kube-api-access-vp2jl\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:04 crc kubenswrapper[4788]: I1211 10:00:04.881205 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/30ed2403-2cfc-4121-a582-429b1c3443e4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:05 crc kubenswrapper[4788]: I1211 10:00:05.274457 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" event={"ID":"30ed2403-2cfc-4121-a582-429b1c3443e4","Type":"ContainerDied","Data":"80b366b4f3238d3c0785fe45f1bd1eb04969d8b6c2425dc023b9b1487ae304ad"} Dec 11 10:00:05 crc kubenswrapper[4788]: I1211 10:00:05.274511 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80b366b4f3238d3c0785fe45f1bd1eb04969d8b6c2425dc023b9b1487ae304ad" Dec 11 10:00:05 crc kubenswrapper[4788]: I1211 10:00:05.274583 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k" Dec 11 10:00:05 crc kubenswrapper[4788]: I1211 10:00:05.329303 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps"] Dec 11 10:00:05 crc kubenswrapper[4788]: I1211 10:00:05.340351 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424075-kgnps"] Dec 11 10:00:06 crc kubenswrapper[4788]: I1211 10:00:06.518862 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d" path="/var/lib/kubelet/pods/b0eaac6d-9ef7-498a-b0c9-ece1aa5eb47d/volumes" Dec 11 10:00:13 crc kubenswrapper[4788]: I1211 10:00:13.495608 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:00:13 crc kubenswrapper[4788]: E1211 10:00:13.496560 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:27 crc kubenswrapper[4788]: I1211 10:00:27.496455 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:00:27 crc kubenswrapper[4788]: E1211 10:00:27.497214 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:39 crc kubenswrapper[4788]: I1211 10:00:39.495309 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:00:39 crc kubenswrapper[4788]: E1211 10:00:39.496042 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:46 crc kubenswrapper[4788]: I1211 10:00:46.491665 4788 scope.go:117] "RemoveContainer" containerID="e517087aef58e5f301cf2f199b292e2c9d1e76412639cc1cf1e35e15c4fee6a6" Dec 11 10:00:50 crc kubenswrapper[4788]: I1211 10:00:50.695341 4788 generic.go:334] "Generic (PLEG): container finished" podID="6de52d95-9e8d-402b-9fd3-3431a58a61e8" containerID="38a65831cd07cbef06aa85114ac1bacddd3d1679e9a97bc960c6aae53608e02d" exitCode=0 Dec 11 10:00:50 crc kubenswrapper[4788]: I1211 10:00:50.695396 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" event={"ID":"6de52d95-9e8d-402b-9fd3-3431a58a61e8","Type":"ContainerDied","Data":"38a65831cd07cbef06aa85114ac1bacddd3d1679e9a97bc960c6aae53608e02d"} Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.133297 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.234469 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0\") pod \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.234598 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory\") pod \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.234634 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key\") pod \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.234805 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nr8bm\" (UniqueName: \"kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm\") pod \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.234891 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle\") pod \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\" (UID: \"6de52d95-9e8d-402b-9fd3-3431a58a61e8\") " Dec 11 
10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.241038 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "6de52d95-9e8d-402b-9fd3-3431a58a61e8" (UID: "6de52d95-9e8d-402b-9fd3-3431a58a61e8"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.241409 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm" (OuterVolumeSpecName: "kube-api-access-nr8bm") pod "6de52d95-9e8d-402b-9fd3-3431a58a61e8" (UID: "6de52d95-9e8d-402b-9fd3-3431a58a61e8"). InnerVolumeSpecName "kube-api-access-nr8bm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.264457 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "6de52d95-9e8d-402b-9fd3-3431a58a61e8" (UID: "6de52d95-9e8d-402b-9fd3-3431a58a61e8"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.265790 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6de52d95-9e8d-402b-9fd3-3431a58a61e8" (UID: "6de52d95-9e8d-402b-9fd3-3431a58a61e8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.268482 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory" (OuterVolumeSpecName: "inventory") pod "6de52d95-9e8d-402b-9fd3-3431a58a61e8" (UID: "6de52d95-9e8d-402b-9fd3-3431a58a61e8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.337155 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.337200 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.337215 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nr8bm\" (UniqueName: \"kubernetes.io/projected/6de52d95-9e8d-402b-9fd3-3431a58a61e8-kube-api-access-nr8bm\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.337237 4788 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.337248 4788 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6de52d95-9e8d-402b-9fd3-3431a58a61e8-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.497049 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:00:52 crc kubenswrapper[4788]: E1211 10:00:52.497431 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.714911 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" event={"ID":"6de52d95-9e8d-402b-9fd3-3431a58a61e8","Type":"ContainerDied","Data":"7a723b37b3d21fec45febdb3392d1ce177680138ed434a4e5009f828cb8243da"} Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.714953 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-wh8qs" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.714966 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a723b37b3d21fec45febdb3392d1ce177680138ed434a4e5009f828cb8243da" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.821598 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl"] Dec 11 10:00:52 crc kubenswrapper[4788]: E1211 10:00:52.822080 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ed2403-2cfc-4121-a582-429b1c3443e4" containerName="collect-profiles" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.822103 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ed2403-2cfc-4121-a582-429b1c3443e4" containerName="collect-profiles" Dec 11 10:00:52 crc kubenswrapper[4788]: E1211 10:00:52.822138 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6de52d95-9e8d-402b-9fd3-3431a58a61e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.822145 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6de52d95-9e8d-402b-9fd3-3431a58a61e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.822374 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="6de52d95-9e8d-402b-9fd3-3431a58a61e8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.822408 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ed2403-2cfc-4121-a582-429b1c3443e4" containerName="collect-profiles" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.823694 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.826966 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.827034 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.827051 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.827084 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.827322 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.828108 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.833662 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl"] Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956200 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956325 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956349 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q95gr\" (UniqueName: \"kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956373 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956467 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:52 crc kubenswrapper[4788]: I1211 10:00:52.956631 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058442 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058570 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058610 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q95gr\" (UniqueName: \"kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058648 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058731 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.058763 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.063732 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.064302 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.064401 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.064726 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.067293 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.079083 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q95gr\" (UniqueName: \"kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.142784 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.712087 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl"] Dec 11 10:00:53 crc kubenswrapper[4788]: I1211 10:00:53.723355 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:00:54 crc kubenswrapper[4788]: I1211 10:00:54.765689 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" event={"ID":"625e8b87-3138-45b9-935d-d26e22240053","Type":"ContainerStarted","Data":"e75c2c714fa8c9507c3b772e6b47545797fda5f449b9db96be9f734386c39eac"} Dec 11 10:00:54 crc kubenswrapper[4788]: I1211 10:00:54.766078 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" event={"ID":"625e8b87-3138-45b9-935d-d26e22240053","Type":"ContainerStarted","Data":"dcfa907c4304d34f307b95e6dd9d791ec2a33d9d3df822888c63a0f708925623"} Dec 11 10:00:54 crc kubenswrapper[4788]: I1211 10:00:54.783314 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" podStartSLOduration=2.58799075 podStartE2EDuration="2.783275604s" podCreationTimestamp="2025-12-11 10:00:52 +0000 UTC" firstStartedPulling="2025-12-11 10:00:53.722019728 +0000 UTC m=+2383.792799314" lastFinishedPulling="2025-12-11 10:00:53.917304582 +0000 UTC m=+2383.988084168" observedRunningTime="2025-12-11 10:00:54.781389056 +0000 UTC m=+2384.852168632" watchObservedRunningTime="2025-12-11 10:00:54.783275604 +0000 UTC m=+2384.854055180" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.663784 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.667426 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.697014 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.838822 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.838982 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.839112 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbwht\" (UniqueName: \"kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.940933 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.941045 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.941126 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbwht\" (UniqueName: \"kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.941628 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.941774 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.963585 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wbwht\" (UniqueName: \"kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht\") pod \"community-operators-v6h9w\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:58 crc kubenswrapper[4788]: I1211 10:00:58.994414 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:00:59 crc kubenswrapper[4788]: I1211 10:00:59.648292 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:00:59 crc kubenswrapper[4788]: W1211 10:00:59.667066 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6809b09e_b41b_47ad_8a69_71b49b869d0d.slice/crio-1039f429fe97e0f2c397a6af4abc1262ad36fb56150541b7b3ef629eca539fcb WatchSource:0}: Error finding container 1039f429fe97e0f2c397a6af4abc1262ad36fb56150541b7b3ef629eca539fcb: Status 404 returned error can't find the container with id 1039f429fe97e0f2c397a6af4abc1262ad36fb56150541b7b3ef629eca539fcb Dec 11 10:00:59 crc kubenswrapper[4788]: I1211 10:00:59.815696 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerStarted","Data":"1039f429fe97e0f2c397a6af4abc1262ad36fb56150541b7b3ef629eca539fcb"} Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.144022 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29424121-g2d82"] Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.146157 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.178157 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29424121-g2d82"] Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.267532 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.267605 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.268251 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.268370 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jsgr\" (UniqueName: \"kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.370444 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jsgr\" (UniqueName: \"kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.370576 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.370611 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.370684 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.378381 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.378953 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.384188 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.390967 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jsgr\" (UniqueName: \"kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr\") pod \"keystone-cron-29424121-g2d82\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.474180 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.831568 4788 generic.go:334] "Generic (PLEG): container finished" podID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerID="a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377" exitCode=0 Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.831852 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerDied","Data":"a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377"} Dec 11 10:01:00 crc kubenswrapper[4788]: I1211 10:01:00.965907 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29424121-g2d82"] Dec 11 10:01:01 crc kubenswrapper[4788]: I1211 10:01:01.852596 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29424121-g2d82" event={"ID":"f1b95b41-dc96-4424-84a4-3c647a46ef4e","Type":"ContainerStarted","Data":"602337aa780fbe5804735cfa93989e279d6951cb698bc227934830d9a093dccf"} Dec 11 10:01:01 crc kubenswrapper[4788]: I1211 10:01:01.852924 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29424121-g2d82" event={"ID":"f1b95b41-dc96-4424-84a4-3c647a46ef4e","Type":"ContainerStarted","Data":"8f72af43883294278aacd68caf6f25287852586fa315da324a827f750f1a4048"} Dec 11 10:01:01 crc kubenswrapper[4788]: I1211 10:01:01.893123 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29424121-g2d82" podStartSLOduration=1.8930977580000001 podStartE2EDuration="1.893097758s" podCreationTimestamp="2025-12-11 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:01:01.88450168 +0000 UTC m=+2391.955281276" watchObservedRunningTime="2025-12-11 10:01:01.893097758 +0000 UTC m=+2391.963877344" Dec 11 10:01:02 crc 
kubenswrapper[4788]: I1211 10:01:02.866098 4788 generic.go:334] "Generic (PLEG): container finished" podID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerID="d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281" exitCode=0 Dec 11 10:01:02 crc kubenswrapper[4788]: I1211 10:01:02.867331 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerDied","Data":"d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281"} Dec 11 10:01:04 crc kubenswrapper[4788]: I1211 10:01:04.888684 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerStarted","Data":"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a"} Dec 11 10:01:04 crc kubenswrapper[4788]: I1211 10:01:04.892068 4788 generic.go:334] "Generic (PLEG): container finished" podID="f1b95b41-dc96-4424-84a4-3c647a46ef4e" containerID="602337aa780fbe5804735cfa93989e279d6951cb698bc227934830d9a093dccf" exitCode=0 Dec 11 10:01:04 crc kubenswrapper[4788]: I1211 10:01:04.892129 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29424121-g2d82" event={"ID":"f1b95b41-dc96-4424-84a4-3c647a46ef4e","Type":"ContainerDied","Data":"602337aa780fbe5804735cfa93989e279d6951cb698bc227934830d9a093dccf"} Dec 11 10:01:04 crc kubenswrapper[4788]: I1211 10:01:04.922114 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v6h9w" podStartSLOduration=3.825352006 podStartE2EDuration="6.922087365s" podCreationTimestamp="2025-12-11 10:00:58 +0000 UTC" firstStartedPulling="2025-12-11 10:01:00.835401131 +0000 UTC m=+2390.906180707" lastFinishedPulling="2025-12-11 10:01:03.93213648 +0000 UTC m=+2394.002916066" observedRunningTime="2025-12-11 10:01:04.916916184 +0000 UTC m=+2394.987695780" watchObservedRunningTime="2025-12-11 10:01:04.922087365 +0000 UTC m=+2394.992866951" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.336840 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.417112 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jsgr\" (UniqueName: \"kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr\") pod \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.417271 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data\") pod \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.417952 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle\") pod \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.425410 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr" (OuterVolumeSpecName: "kube-api-access-4jsgr") pod "f1b95b41-dc96-4424-84a4-3c647a46ef4e" (UID: "f1b95b41-dc96-4424-84a4-3c647a46ef4e"). InnerVolumeSpecName "kube-api-access-4jsgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.448405 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1b95b41-dc96-4424-84a4-3c647a46ef4e" (UID: "f1b95b41-dc96-4424-84a4-3c647a46ef4e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.506582 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data" (OuterVolumeSpecName: "config-data") pod "f1b95b41-dc96-4424-84a4-3c647a46ef4e" (UID: "f1b95b41-dc96-4424-84a4-3c647a46ef4e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.521400 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys\") pod \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\" (UID: \"f1b95b41-dc96-4424-84a4-3c647a46ef4e\") " Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.522657 4788 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.522685 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jsgr\" (UniqueName: \"kubernetes.io/projected/f1b95b41-dc96-4424-84a4-3c647a46ef4e-kube-api-access-4jsgr\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.522700 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.524992 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f1b95b41-dc96-4424-84a4-3c647a46ef4e" (UID: "f1b95b41-dc96-4424-84a4-3c647a46ef4e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.624968 4788 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f1b95b41-dc96-4424-84a4-3c647a46ef4e-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.913917 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29424121-g2d82" event={"ID":"f1b95b41-dc96-4424-84a4-3c647a46ef4e","Type":"ContainerDied","Data":"8f72af43883294278aacd68caf6f25287852586fa315da324a827f750f1a4048"} Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.913979 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f72af43883294278aacd68caf6f25287852586fa315da324a827f750f1a4048" Dec 11 10:01:06 crc kubenswrapper[4788]: I1211 10:01:06.914056 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29424121-g2d82" Dec 11 10:01:07 crc kubenswrapper[4788]: I1211 10:01:07.496190 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:01:07 crc kubenswrapper[4788]: E1211 10:01:07.496759 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:01:08 crc kubenswrapper[4788]: I1211 10:01:08.995457 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:08 crc kubenswrapper[4788]: I1211 10:01:08.995754 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:09 crc kubenswrapper[4788]: I1211 10:01:09.051681 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:10 crc kubenswrapper[4788]: I1211 10:01:10.016105 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:10 crc kubenswrapper[4788]: I1211 10:01:10.085173 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:01:11 crc kubenswrapper[4788]: I1211 10:01:11.976987 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v6h9w" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="registry-server" containerID="cri-o://918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a" gracePeriod=2 Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.486887 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.648450 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbwht\" (UniqueName: \"kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht\") pod \"6809b09e-b41b-47ad-8a69-71b49b869d0d\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.648554 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content\") pod \"6809b09e-b41b-47ad-8a69-71b49b869d0d\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.648660 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities\") pod \"6809b09e-b41b-47ad-8a69-71b49b869d0d\" (UID: \"6809b09e-b41b-47ad-8a69-71b49b869d0d\") " Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.650089 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities" (OuterVolumeSpecName: "utilities") pod "6809b09e-b41b-47ad-8a69-71b49b869d0d" (UID: "6809b09e-b41b-47ad-8a69-71b49b869d0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.655715 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht" (OuterVolumeSpecName: "kube-api-access-wbwht") pod "6809b09e-b41b-47ad-8a69-71b49b869d0d" (UID: "6809b09e-b41b-47ad-8a69-71b49b869d0d"). InnerVolumeSpecName "kube-api-access-wbwht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.752939 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.753305 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbwht\" (UniqueName: \"kubernetes.io/projected/6809b09e-b41b-47ad-8a69-71b49b869d0d-kube-api-access-wbwht\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.948604 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6809b09e-b41b-47ad-8a69-71b49b869d0d" (UID: "6809b09e-b41b-47ad-8a69-71b49b869d0d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.957705 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6809b09e-b41b-47ad-8a69-71b49b869d0d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.991484 4788 generic.go:334] "Generic (PLEG): container finished" podID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerID="918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a" exitCode=0 Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.991539 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerDied","Data":"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a"} Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.991572 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v6h9w" Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.991582 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v6h9w" event={"ID":"6809b09e-b41b-47ad-8a69-71b49b869d0d","Type":"ContainerDied","Data":"1039f429fe97e0f2c397a6af4abc1262ad36fb56150541b7b3ef629eca539fcb"} Dec 11 10:01:12 crc kubenswrapper[4788]: I1211 10:01:12.991605 4788 scope.go:117] "RemoveContainer" containerID="918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.016768 4788 scope.go:117] "RemoveContainer" containerID="d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.032437 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.044924 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v6h9w"] Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.053878 4788 scope.go:117] "RemoveContainer" containerID="a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.093058 4788 scope.go:117] "RemoveContainer" containerID="918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a" Dec 11 10:01:13 crc kubenswrapper[4788]: E1211 10:01:13.093738 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a\": container with ID starting with 918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a not found: ID does not exist" containerID="918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.093809 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a"} err="failed to get container status \"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a\": rpc error: code = NotFound desc = could not find container \"918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a\": container with ID starting with 918c8441b88d356472641ce8011f4c9c37c2d2a2eafbc5c9f5ab48222beba52a not found: ID does not exist" Dec 11 
10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.093848 4788 scope.go:117] "RemoveContainer" containerID="d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281" Dec 11 10:01:13 crc kubenswrapper[4788]: E1211 10:01:13.094720 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281\": container with ID starting with d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281 not found: ID does not exist" containerID="d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.094874 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281"} err="failed to get container status \"d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281\": rpc error: code = NotFound desc = could not find container \"d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281\": container with ID starting with d24e286dfa5b2a850713017dd028a1a31033077c0c61f8420ac627e76a665281 not found: ID does not exist" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.094998 4788 scope.go:117] "RemoveContainer" containerID="a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377" Dec 11 10:01:13 crc kubenswrapper[4788]: E1211 10:01:13.095544 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377\": container with ID starting with a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377 not found: ID does not exist" containerID="a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377" Dec 11 10:01:13 crc kubenswrapper[4788]: I1211 10:01:13.095585 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377"} err="failed to get container status \"a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377\": rpc error: code = NotFound desc = could not find container \"a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377\": container with ID starting with a759fd35354b92e52a89bc113b0646180ebb426af7f5b1eea936bb0cf6c5c377 not found: ID does not exist" Dec 11 10:01:14 crc kubenswrapper[4788]: I1211 10:01:14.506632 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" path="/var/lib/kubelet/pods/6809b09e-b41b-47ad-8a69-71b49b869d0d/volumes" Dec 11 10:01:22 crc kubenswrapper[4788]: I1211 10:01:22.495936 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:01:22 crc kubenswrapper[4788]: E1211 10:01:22.519710 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:01:35 crc kubenswrapper[4788]: I1211 10:01:35.495974 4788 scope.go:117] "RemoveContainer" 
containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:01:35 crc kubenswrapper[4788]: E1211 10:01:35.496979 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:01:46 crc kubenswrapper[4788]: I1211 10:01:46.573513 4788 scope.go:117] "RemoveContainer" containerID="96a4fa5ac39c9c49c9a0eb1113c9454baf0efed634f606e7c91648b1bad6ade1" Dec 11 10:01:46 crc kubenswrapper[4788]: I1211 10:01:46.609940 4788 scope.go:117] "RemoveContainer" containerID="dcbd1ebc550bce5cee9ae064abf624639213a7520690f286901580222986f3be" Dec 11 10:01:46 crc kubenswrapper[4788]: I1211 10:01:46.644456 4788 scope.go:117] "RemoveContainer" containerID="0d8ee2fb0c394d511ff4590a8d365722ea0d55f33b742930488a92fab1082643" Dec 11 10:01:47 crc kubenswrapper[4788]: I1211 10:01:47.359869 4788 generic.go:334] "Generic (PLEG): container finished" podID="625e8b87-3138-45b9-935d-d26e22240053" containerID="e75c2c714fa8c9507c3b772e6b47545797fda5f449b9db96be9f734386c39eac" exitCode=0 Dec 11 10:01:47 crc kubenswrapper[4788]: I1211 10:01:47.359965 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" event={"ID":"625e8b87-3138-45b9-935d-d26e22240053","Type":"ContainerDied","Data":"e75c2c714fa8c9507c3b772e6b47545797fda5f449b9db96be9f734386c39eac"} Dec 11 10:01:48 crc kubenswrapper[4788]: I1211 10:01:48.835367 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.037329 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.037425 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.038365 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.038643 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q95gr\" (UniqueName: \"kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.038768 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.038801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0\") pod \"625e8b87-3138-45b9-935d-d26e22240053\" (UID: \"625e8b87-3138-45b9-935d-d26e22240053\") " Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.045198 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr" (OuterVolumeSpecName: "kube-api-access-q95gr") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "kube-api-access-q95gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.045808 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.073842 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.074685 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.075062 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.080198 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory" (OuterVolumeSpecName: "inventory") pod "625e8b87-3138-45b9-935d-d26e22240053" (UID: "625e8b87-3138-45b9-935d-d26e22240053"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141692 4788 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141783 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q95gr\" (UniqueName: \"kubernetes.io/projected/625e8b87-3138-45b9-935d-d26e22240053-kube-api-access-q95gr\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141821 4788 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141833 4788 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141843 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.141850 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/625e8b87-3138-45b9-935d-d26e22240053-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.379555 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" event={"ID":"625e8b87-3138-45b9-935d-d26e22240053","Type":"ContainerDied","Data":"dcfa907c4304d34f307b95e6dd9d791ec2a33d9d3df822888c63a0f708925623"} Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.379605 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcfa907c4304d34f307b95e6dd9d791ec2a33d9d3df822888c63a0f708925623" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.379675 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.496747 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.497794 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.511059 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8"] Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.511944 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="extract-utilities" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512064 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="extract-utilities" Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.512124 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625e8b87-3138-45b9-935d-d26e22240053" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512196 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="625e8b87-3138-45b9-935d-d26e22240053" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.512320 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="extract-content" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512400 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="extract-content" Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.512473 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1b95b41-dc96-4424-84a4-3c647a46ef4e" containerName="keystone-cron" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512524 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1b95b41-dc96-4424-84a4-3c647a46ef4e" containerName="keystone-cron" Dec 11 10:01:49 crc kubenswrapper[4788]: E1211 10:01:49.512594 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="registry-server" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512645 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="registry-server" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512912 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="6809b09e-b41b-47ad-8a69-71b49b869d0d" containerName="registry-server" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.512977 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1b95b41-dc96-4424-84a4-3c647a46ef4e" containerName="keystone-cron" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.513042 4788 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="625e8b87-3138-45b9-935d-d26e22240053" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.514172 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.516448 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.517000 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.517139 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.517199 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.517408 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.522573 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8"] Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.652507 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.652609 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.652656 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjhsc\" (UniqueName: \"kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.652691 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.652868 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: 
\"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.755327 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.755405 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.755453 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjhsc\" (UniqueName: \"kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.755483 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.755556 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.760015 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.760044 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.760044 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: 
I1211 10:01:49.767676 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.775422 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjhsc\" (UniqueName: \"kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:49 crc kubenswrapper[4788]: I1211 10:01:49.836448 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:01:50 crc kubenswrapper[4788]: I1211 10:01:50.244730 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8"] Dec 11 10:01:50 crc kubenswrapper[4788]: I1211 10:01:50.398388 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" event={"ID":"20db1208-f411-4f0b-87da-e10fc9a8c4f9","Type":"ContainerStarted","Data":"d9d9f75c8f0b5ad1a299a0ea04371fbf719ad4118841436f99e61da2d7810157"} Dec 11 10:01:51 crc kubenswrapper[4788]: I1211 10:01:51.410556 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" event={"ID":"20db1208-f411-4f0b-87da-e10fc9a8c4f9","Type":"ContainerStarted","Data":"9e755707a4ab189ad8f227aa0e3bafb7a4d81ef1307c331c895369f8bb2fe94f"} Dec 11 10:01:51 crc kubenswrapper[4788]: I1211 10:01:51.435035 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" podStartSLOduration=2.270205496 podStartE2EDuration="2.43501134s" podCreationTimestamp="2025-12-11 10:01:49 +0000 UTC" firstStartedPulling="2025-12-11 10:01:50.251355371 +0000 UTC m=+2440.322134957" lastFinishedPulling="2025-12-11 10:01:50.416161215 +0000 UTC m=+2440.486940801" observedRunningTime="2025-12-11 10:01:51.426950756 +0000 UTC m=+2441.497730342" watchObservedRunningTime="2025-12-11 10:01:51.43501134 +0000 UTC m=+2441.505790926" Dec 11 10:02:04 crc kubenswrapper[4788]: I1211 10:02:04.496211 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:02:04 crc kubenswrapper[4788]: E1211 10:02:04.497245 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:02:16 crc kubenswrapper[4788]: I1211 10:02:16.496397 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:02:16 crc kubenswrapper[4788]: E1211 10:02:16.497184 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:02:30 crc kubenswrapper[4788]: I1211 10:02:30.503544 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:02:30 crc kubenswrapper[4788]: E1211 10:02:30.504627 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:02:42 crc kubenswrapper[4788]: I1211 10:02:42.496631 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:02:42 crc kubenswrapper[4788]: E1211 10:02:42.497425 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:02:57 crc kubenswrapper[4788]: I1211 10:02:57.497145 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:02:57 crc kubenswrapper[4788]: E1211 10:02:57.498044 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:03:10 crc kubenswrapper[4788]: I1211 10:03:10.507151 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:03:10 crc kubenswrapper[4788]: E1211 10:03:10.510954 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:03:22 crc kubenswrapper[4788]: I1211 10:03:22.496256 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:03:22 crc kubenswrapper[4788]: E1211 10:03:22.497252 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:03:34 crc kubenswrapper[4788]: I1211 10:03:34.497439 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:03:34 crc kubenswrapper[4788]: E1211 10:03:34.498295 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:03:49 crc kubenswrapper[4788]: I1211 10:03:49.496012 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:03:49 crc kubenswrapper[4788]: E1211 10:03:49.496932 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:04:03 crc kubenswrapper[4788]: I1211 10:04:03.495876 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:04:03 crc kubenswrapper[4788]: E1211 10:04:03.496742 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.071082 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.075742 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.088673 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.168786 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.168960 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg66t\" (UniqueName: \"kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.168986 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.271729 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg66t\" (UniqueName: \"kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.271818 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.271969 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.272707 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.273041 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.298737 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kg66t\" (UniqueName: \"kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t\") pod \"certified-operators-tqh7l\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.407148 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:08 crc kubenswrapper[4788]: I1211 10:04:08.994087 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:09 crc kubenswrapper[4788]: W1211 10:04:09.024027 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbd344c3_4eb0_4e56_bf1d_cde541fb29bd.slice/crio-f12b85103dbd293999d2a5e956a3099762e5444c0daabb04f9c63a10ecf0bfe1 WatchSource:0}: Error finding container f12b85103dbd293999d2a5e956a3099762e5444c0daabb04f9c63a10ecf0bfe1: Status 404 returned error can't find the container with id f12b85103dbd293999d2a5e956a3099762e5444c0daabb04f9c63a10ecf0bfe1 Dec 11 10:04:09 crc kubenswrapper[4788]: I1211 10:04:09.776805 4788 generic.go:334] "Generic (PLEG): container finished" podID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerID="6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801" exitCode=0 Dec 11 10:04:09 crc kubenswrapper[4788]: I1211 10:04:09.776985 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerDied","Data":"6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801"} Dec 11 10:04:09 crc kubenswrapper[4788]: I1211 10:04:09.777154 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerStarted","Data":"f12b85103dbd293999d2a5e956a3099762e5444c0daabb04f9c63a10ecf0bfe1"} Dec 11 10:04:11 crc kubenswrapper[4788]: I1211 10:04:11.802321 4788 generic.go:334] "Generic (PLEG): container finished" podID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerID="8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059" exitCode=0 Dec 11 10:04:11 crc kubenswrapper[4788]: I1211 10:04:11.802418 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerDied","Data":"8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059"} Dec 11 10:04:13 crc kubenswrapper[4788]: I1211 10:04:13.831885 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerStarted","Data":"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a"} Dec 11 10:04:13 crc kubenswrapper[4788]: I1211 10:04:13.856494 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tqh7l" podStartSLOduration=3.080485516 podStartE2EDuration="5.856476874s" podCreationTimestamp="2025-12-11 10:04:08 +0000 UTC" firstStartedPulling="2025-12-11 10:04:09.779714477 +0000 UTC m=+2579.850494063" lastFinishedPulling="2025-12-11 10:04:12.555705835 +0000 UTC m=+2582.626485421" observedRunningTime="2025-12-11 10:04:13.853351714 +0000 UTC 
m=+2583.924131300" watchObservedRunningTime="2025-12-11 10:04:13.856476874 +0000 UTC m=+2583.927256460" Dec 11 10:04:17 crc kubenswrapper[4788]: I1211 10:04:17.496082 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:04:17 crc kubenswrapper[4788]: E1211 10:04:17.497007 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:04:18 crc kubenswrapper[4788]: I1211 10:04:18.407823 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:18 crc kubenswrapper[4788]: I1211 10:04:18.407921 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:18 crc kubenswrapper[4788]: I1211 10:04:18.457496 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:18 crc kubenswrapper[4788]: I1211 10:04:18.934671 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:19 crc kubenswrapper[4788]: I1211 10:04:19.005208 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:20 crc kubenswrapper[4788]: I1211 10:04:20.902935 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tqh7l" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="registry-server" containerID="cri-o://d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a" gracePeriod=2 Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.421188 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.585019 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content\") pod \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.585259 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities\") pod \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.585353 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg66t\" (UniqueName: \"kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t\") pod \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\" (UID: \"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd\") " Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.586983 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities" (OuterVolumeSpecName: "utilities") pod "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" (UID: "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.593698 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t" (OuterVolumeSpecName: "kube-api-access-kg66t") pod "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" (UID: "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd"). InnerVolumeSpecName "kube-api-access-kg66t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.688459 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg66t\" (UniqueName: \"kubernetes.io/projected/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-kube-api-access-kg66t\") on node \"crc\" DevicePath \"\"" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.688495 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.916282 4788 generic.go:334] "Generic (PLEG): container finished" podID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerID="d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a" exitCode=0 Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.916351 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerDied","Data":"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a"} Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.916371 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tqh7l" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.916396 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tqh7l" event={"ID":"cbd344c3-4eb0-4e56-bf1d-cde541fb29bd","Type":"ContainerDied","Data":"f12b85103dbd293999d2a5e956a3099762e5444c0daabb04f9c63a10ecf0bfe1"} Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.916418 4788 scope.go:117] "RemoveContainer" containerID="d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.946248 4788 scope.go:117] "RemoveContainer" containerID="8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059" Dec 11 10:04:21 crc kubenswrapper[4788]: I1211 10:04:21.988945 4788 scope.go:117] "RemoveContainer" containerID="6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.030878 4788 scope.go:117] "RemoveContainer" containerID="d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a" Dec 11 10:04:22 crc kubenswrapper[4788]: E1211 10:04:22.031515 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a\": container with ID starting with d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a not found: ID does not exist" containerID="d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.031568 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a"} err="failed to get container status \"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a\": rpc error: code = NotFound desc = could not find container \"d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a\": container with ID starting with d562858539ea897d5f4c8bba9afecc4aed91dcfea73a6107ec637c42397faf0a not found: ID does not exist" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.031598 4788 scope.go:117] "RemoveContainer" containerID="8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059" Dec 11 10:04:22 crc kubenswrapper[4788]: E1211 10:04:22.032156 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059\": container with ID starting with 8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059 not found: ID does not exist" containerID="8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.032347 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059"} err="failed to get container status \"8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059\": rpc error: code = NotFound desc = could not find container \"8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059\": container with ID starting with 8bb4ff5627f3d3d487500702be225e08eeeab816cbab1981ea5dc10a65008059 not found: ID does not exist" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.032450 4788 scope.go:117] "RemoveContainer" 
containerID="6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801" Dec 11 10:04:22 crc kubenswrapper[4788]: E1211 10:04:22.032838 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801\": container with ID starting with 6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801 not found: ID does not exist" containerID="6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.032928 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801"} err="failed to get container status \"6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801\": rpc error: code = NotFound desc = could not find container \"6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801\": container with ID starting with 6eb71f9eb073cbda9403f02ad8ef09c525570f492df211b07cae8def93a1e801 not found: ID does not exist" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.055790 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" (UID: "cbd344c3-4eb0-4e56-bf1d-cde541fb29bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.098267 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.269267 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.279453 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tqh7l"] Dec 11 10:04:22 crc kubenswrapper[4788]: I1211 10:04:22.515316 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" path="/var/lib/kubelet/pods/cbd344c3-4eb0-4e56-bf1d-cde541fb29bd/volumes" Dec 11 10:04:30 crc kubenswrapper[4788]: I1211 10:04:30.495328 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:04:31 crc kubenswrapper[4788]: I1211 10:04:31.199029 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b"} Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.857596 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:01 crc kubenswrapper[4788]: E1211 10:05:01.858650 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="extract-content" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.858668 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="extract-content" Dec 11 10:05:01 crc 
kubenswrapper[4788]: E1211 10:05:01.858702 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="extract-utilities" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.858708 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="extract-utilities" Dec 11 10:05:01 crc kubenswrapper[4788]: E1211 10:05:01.858721 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="registry-server" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.858731 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="registry-server" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.858949 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbd344c3-4eb0-4e56-bf1d-cde541fb29bd" containerName="registry-server" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.860562 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.881383 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.939137 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsgkb\" (UniqueName: \"kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.939315 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:01 crc kubenswrapper[4788]: I1211 10:05:01.939355 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.041383 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsgkb\" (UniqueName: \"kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.041491 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.041536 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.042359 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.042624 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.064778 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsgkb\" (UniqueName: \"kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb\") pod \"redhat-marketplace-2ttns\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.197994 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:02 crc kubenswrapper[4788]: I1211 10:05:02.727098 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:03 crc kubenswrapper[4788]: I1211 10:05:03.600911 4788 generic.go:334] "Generic (PLEG): container finished" podID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerID="cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b" exitCode=0 Dec 11 10:05:03 crc kubenswrapper[4788]: I1211 10:05:03.602903 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerDied","Data":"cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b"} Dec 11 10:05:03 crc kubenswrapper[4788]: I1211 10:05:03.603088 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerStarted","Data":"80b6074d4ba8882c48276428b450b2fe3f9e5ab31a5ef303988b35e5dabab142"} Dec 11 10:05:05 crc kubenswrapper[4788]: I1211 10:05:05.626047 4788 generic.go:334] "Generic (PLEG): container finished" podID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerID="688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85" exitCode=0 Dec 11 10:05:05 crc kubenswrapper[4788]: I1211 10:05:05.626129 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerDied","Data":"688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85"} Dec 11 10:05:06 crc kubenswrapper[4788]: I1211 10:05:06.648849 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerStarted","Data":"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd"} Dec 11 
10:05:06 crc kubenswrapper[4788]: I1211 10:05:06.690787 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2ttns" podStartSLOduration=2.97183773 podStartE2EDuration="5.690732052s" podCreationTimestamp="2025-12-11 10:05:01 +0000 UTC" firstStartedPulling="2025-12-11 10:05:03.609986022 +0000 UTC m=+2633.680765608" lastFinishedPulling="2025-12-11 10:05:06.328880344 +0000 UTC m=+2636.399659930" observedRunningTime="2025-12-11 10:05:06.675728272 +0000 UTC m=+2636.746507868" watchObservedRunningTime="2025-12-11 10:05:06.690732052 +0000 UTC m=+2636.761511638" Dec 11 10:05:12 crc kubenswrapper[4788]: I1211 10:05:12.199215 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:12 crc kubenswrapper[4788]: I1211 10:05:12.199897 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:12 crc kubenswrapper[4788]: I1211 10:05:12.259386 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:12 crc kubenswrapper[4788]: I1211 10:05:12.857095 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:12 crc kubenswrapper[4788]: I1211 10:05:12.917682 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:14 crc kubenswrapper[4788]: I1211 10:05:14.817680 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2ttns" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="registry-server" containerID="cri-o://4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd" gracePeriod=2 Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.403612 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.436081 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsgkb\" (UniqueName: \"kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb\") pod \"5a35df7f-52d8-494a-9b39-b6cd77758924\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.436386 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities\") pod \"5a35df7f-52d8-494a-9b39-b6cd77758924\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.436419 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content\") pod \"5a35df7f-52d8-494a-9b39-b6cd77758924\" (UID: \"5a35df7f-52d8-494a-9b39-b6cd77758924\") " Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.437833 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities" (OuterVolumeSpecName: "utilities") pod "5a35df7f-52d8-494a-9b39-b6cd77758924" (UID: "5a35df7f-52d8-494a-9b39-b6cd77758924"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.440711 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.446057 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb" (OuterVolumeSpecName: "kube-api-access-bsgkb") pod "5a35df7f-52d8-494a-9b39-b6cd77758924" (UID: "5a35df7f-52d8-494a-9b39-b6cd77758924"). InnerVolumeSpecName "kube-api-access-bsgkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.461206 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a35df7f-52d8-494a-9b39-b6cd77758924" (UID: "5a35df7f-52d8-494a-9b39-b6cd77758924"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.543646 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsgkb\" (UniqueName: \"kubernetes.io/projected/5a35df7f-52d8-494a-9b39-b6cd77758924-kube-api-access-bsgkb\") on node \"crc\" DevicePath \"\"" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.543715 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a35df7f-52d8-494a-9b39-b6cd77758924-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.831272 4788 generic.go:334] "Generic (PLEG): container finished" podID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerID="4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd" exitCode=0 Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.831346 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerDied","Data":"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd"} Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.831379 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ttns" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.831409 4788 scope.go:117] "RemoveContainer" containerID="4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.831392 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ttns" event={"ID":"5a35df7f-52d8-494a-9b39-b6cd77758924","Type":"ContainerDied","Data":"80b6074d4ba8882c48276428b450b2fe3f9e5ab31a5ef303988b35e5dabab142"} Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.867633 4788 scope.go:117] "RemoveContainer" containerID="688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.880362 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.890894 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ttns"] Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.915517 4788 scope.go:117] "RemoveContainer" containerID="cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.947156 4788 scope.go:117] "RemoveContainer" containerID="4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd" Dec 11 10:05:15 crc kubenswrapper[4788]: E1211 10:05:15.947890 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd\": container with ID starting with 4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd not found: ID does not exist" containerID="4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.947943 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd"} err="failed to get container status \"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd\": rpc error: code = NotFound desc = could not find container \"4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd\": container with ID starting with 4d1c755c8c8e54b9f08d079e6643800cd274cdcf4591df0d1e9a48514b8c6bdd not found: ID does not exist" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.947977 4788 scope.go:117] "RemoveContainer" containerID="688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85" Dec 11 10:05:15 crc kubenswrapper[4788]: E1211 10:05:15.948657 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85\": container with ID starting with 688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85 not found: ID does not exist" containerID="688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.948689 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85"} err="failed to get container status \"688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85\": rpc error: code = NotFound desc = could not find 
container \"688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85\": container with ID starting with 688ffeb75c5ae76c89f64e0ea0fa1fd39317fdc17a5e8552631e0116d0e14d85 not found: ID does not exist" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.948715 4788 scope.go:117] "RemoveContainer" containerID="cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b" Dec 11 10:05:15 crc kubenswrapper[4788]: E1211 10:05:15.950286 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b\": container with ID starting with cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b not found: ID does not exist" containerID="cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b" Dec 11 10:05:15 crc kubenswrapper[4788]: I1211 10:05:15.950434 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b"} err="failed to get container status \"cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b\": rpc error: code = NotFound desc = could not find container \"cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b\": container with ID starting with cc4f5f847bd71ae3b8b282809c856dc6715d88f7829a8d87fc519fb37b1d482b not found: ID does not exist" Dec 11 10:05:16 crc kubenswrapper[4788]: I1211 10:05:16.509765 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" path="/var/lib/kubelet/pods/5a35df7f-52d8-494a-9b39-b6cd77758924/volumes" Dec 11 10:06:13 crc kubenswrapper[4788]: I1211 10:06:13.421805 4788 generic.go:334] "Generic (PLEG): container finished" podID="20db1208-f411-4f0b-87da-e10fc9a8c4f9" containerID="9e755707a4ab189ad8f227aa0e3bafb7a4d81ef1307c331c895369f8bb2fe94f" exitCode=0 Dec 11 10:06:13 crc kubenswrapper[4788]: I1211 10:06:13.422301 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" event={"ID":"20db1208-f411-4f0b-87da-e10fc9a8c4f9","Type":"ContainerDied","Data":"9e755707a4ab189ad8f227aa0e3bafb7a4d81ef1307c331c895369f8bb2fe94f"} Dec 11 10:06:14 crc kubenswrapper[4788]: I1211 10:06:14.858711 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:06:14 crc kubenswrapper[4788]: I1211 10:06:14.897416 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key\") pod \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " Dec 11 10:06:14 crc kubenswrapper[4788]: I1211 10:06:14.897838 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjhsc\" (UniqueName: \"kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc\") pod \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " Dec 11 10:06:14 crc kubenswrapper[4788]: I1211 10:06:14.906685 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc" (OuterVolumeSpecName: "kube-api-access-fjhsc") pod "20db1208-f411-4f0b-87da-e10fc9a8c4f9" (UID: "20db1208-f411-4f0b-87da-e10fc9a8c4f9"). InnerVolumeSpecName "kube-api-access-fjhsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:06:14 crc kubenswrapper[4788]: I1211 10:06:14.932660 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "20db1208-f411-4f0b-87da-e10fc9a8c4f9" (UID: "20db1208-f411-4f0b-87da-e10fc9a8c4f9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.000088 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory\") pod \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.000172 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0\") pod \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.000251 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle\") pod \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\" (UID: \"20db1208-f411-4f0b-87da-e10fc9a8c4f9\") " Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.000968 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjhsc\" (UniqueName: \"kubernetes.io/projected/20db1208-f411-4f0b-87da-e10fc9a8c4f9-kube-api-access-fjhsc\") on node \"crc\" DevicePath \"\"" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.001054 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.004682 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle" (OuterVolumeSpecName: 
"libvirt-combined-ca-bundle") pod "20db1208-f411-4f0b-87da-e10fc9a8c4f9" (UID: "20db1208-f411-4f0b-87da-e10fc9a8c4f9"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.028011 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory" (OuterVolumeSpecName: "inventory") pod "20db1208-f411-4f0b-87da-e10fc9a8c4f9" (UID: "20db1208-f411-4f0b-87da-e10fc9a8c4f9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.031684 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "20db1208-f411-4f0b-87da-e10fc9a8c4f9" (UID: "20db1208-f411-4f0b-87da-e10fc9a8c4f9"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.101825 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.102202 4788 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.102357 4788 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20db1208-f411-4f0b-87da-e10fc9a8c4f9-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.442013 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" event={"ID":"20db1208-f411-4f0b-87da-e10fc9a8c4f9","Type":"ContainerDied","Data":"d9d9f75c8f0b5ad1a299a0ea04371fbf719ad4118841436f99e61da2d7810157"} Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.442058 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.442077 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d9f75c8f0b5ad1a299a0ea04371fbf719ad4118841436f99e61da2d7810157" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.544480 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f"] Dec 11 10:06:15 crc kubenswrapper[4788]: E1211 10:06:15.545212 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20db1208-f411-4f0b-87da-e10fc9a8c4f9" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.545315 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="20db1208-f411-4f0b-87da-e10fc9a8c4f9" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 11 10:06:15 crc kubenswrapper[4788]: E1211 10:06:15.545381 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="extract-utilities" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.545436 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="extract-utilities" Dec 11 10:06:15 crc kubenswrapper[4788]: E1211 10:06:15.545529 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="registry-server" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.545599 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="registry-server" Dec 11 10:06:15 crc kubenswrapper[4788]: E1211 10:06:15.545666 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="extract-content" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.545717 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="extract-content" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.545977 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="20db1208-f411-4f0b-87da-e10fc9a8c4f9" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.546059 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a35df7f-52d8-494a-9b39-b6cd77758924" containerName="registry-server" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.546882 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.558784 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.559348 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.559362 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.558943 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.558998 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.559041 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.558784 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.564449 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f"] Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.716367 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.716828 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.716943 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.716979 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.717336 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.717396 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.717451 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h86w\" (UniqueName: \"kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.717593 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.717720 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.819811 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.819901 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.819939 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.819965 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.820021 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.820044 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.820136 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.820158 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.820180 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h86w\" (UniqueName: \"kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.823330 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.825038 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.825634 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.825689 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.825958 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.826031 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.827373 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.827651 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.840169 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h86w\" (UniqueName: \"kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w\") pod \"nova-edpm-deployment-openstack-edpm-ipam-5mm6f\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:15 crc kubenswrapper[4788]: I1211 10:06:15.884404 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:06:16 crc kubenswrapper[4788]: I1211 10:06:16.439130 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f"] Dec 11 10:06:16 crc kubenswrapper[4788]: I1211 10:06:16.442150 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:06:16 crc kubenswrapper[4788]: I1211 10:06:16.461423 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" event={"ID":"2d2d45a1-244d-4e91-be2b-db12eb484a25","Type":"ContainerStarted","Data":"5dfa5989cad36de819b9d9c1ff517cb18639ef88e90be062d4855dc00e8bc796"} Dec 11 10:06:17 crc kubenswrapper[4788]: I1211 10:06:17.471191 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" event={"ID":"2d2d45a1-244d-4e91-be2b-db12eb484a25","Type":"ContainerStarted","Data":"7e92b1c52d3b45d3275554512b1991d35d5c245696a55fda1c6e32d1ea83e266"} Dec 11 10:06:17 crc kubenswrapper[4788]: I1211 10:06:17.494178 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" podStartSLOduration=2.275872713 podStartE2EDuration="2.494157264s" podCreationTimestamp="2025-12-11 10:06:15 +0000 UTC" firstStartedPulling="2025-12-11 10:06:16.441925572 +0000 UTC m=+2706.512705158" lastFinishedPulling="2025-12-11 10:06:16.660210113 +0000 UTC m=+2706.730989709" observedRunningTime="2025-12-11 10:06:17.491703682 +0000 UTC m=+2707.562483268" watchObservedRunningTime="2025-12-11 10:06:17.494157264 +0000 UTC m=+2707.564936850" Dec 11 10:06:51 crc kubenswrapper[4788]: I1211 10:06:51.369545 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:06:51 crc kubenswrapper[4788]: I1211 10:06:51.370267 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.792121 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.796423 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.808023 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.849659 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.849900 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zngt5\" (UniqueName: \"kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.850132 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.952203 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.952731 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.952923 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zngt5\" (UniqueName: \"kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.952930 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.953509 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:20 crc kubenswrapper[4788]: I1211 10:07:20.982501 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zngt5\" (UniqueName: \"kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5\") pod \"redhat-operators-j9dl5\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:21 crc kubenswrapper[4788]: I1211 10:07:21.135483 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:21 crc kubenswrapper[4788]: I1211 10:07:21.370506 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:07:21 crc kubenswrapper[4788]: I1211 10:07:21.370914 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:07:21 crc kubenswrapper[4788]: I1211 10:07:21.485015 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:22 crc kubenswrapper[4788]: I1211 10:07:22.167100 4788 generic.go:334] "Generic (PLEG): container finished" podID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerID="3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8" exitCode=0 Dec 11 10:07:22 crc kubenswrapper[4788]: I1211 10:07:22.167173 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerDied","Data":"3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8"} Dec 11 10:07:22 crc kubenswrapper[4788]: I1211 10:07:22.167269 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerStarted","Data":"69339f096d98f4832cd5cb1e9eb4547d62109d673116083c7b74bb068277eb09"} Dec 11 10:07:23 crc kubenswrapper[4788]: I1211 10:07:23.179706 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerStarted","Data":"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161"} Dec 11 10:07:26 crc kubenswrapper[4788]: I1211 10:07:26.226353 4788 generic.go:334] "Generic (PLEG): container finished" podID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerID="fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161" exitCode=0 Dec 11 10:07:26 crc kubenswrapper[4788]: I1211 10:07:26.226426 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerDied","Data":"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161"} Dec 11 10:07:27 crc kubenswrapper[4788]: I1211 10:07:27.245208 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerStarted","Data":"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f"} Dec 11 10:07:27 crc kubenswrapper[4788]: I1211 10:07:27.268993 4788 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j9dl5" podStartSLOduration=2.589505103 podStartE2EDuration="7.268970771s" podCreationTimestamp="2025-12-11 10:07:20 +0000 UTC" firstStartedPulling="2025-12-11 10:07:22.171187553 +0000 UTC m=+2772.241967139" lastFinishedPulling="2025-12-11 10:07:26.850653221 +0000 UTC m=+2776.921432807" observedRunningTime="2025-12-11 10:07:27.261558233 +0000 UTC m=+2777.332337819" watchObservedRunningTime="2025-12-11 10:07:27.268970771 +0000 UTC m=+2777.339750357" Dec 11 10:07:31 crc kubenswrapper[4788]: I1211 10:07:31.135888 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:31 crc kubenswrapper[4788]: I1211 10:07:31.136521 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:32 crc kubenswrapper[4788]: I1211 10:07:32.183575 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j9dl5" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="registry-server" probeResult="failure" output=< Dec 11 10:07:32 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 10:07:32 crc kubenswrapper[4788]: > Dec 11 10:07:41 crc kubenswrapper[4788]: I1211 10:07:41.187334 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:41 crc kubenswrapper[4788]: I1211 10:07:41.238066 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:41 crc kubenswrapper[4788]: I1211 10:07:41.429315 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.387531 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j9dl5" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="registry-server" containerID="cri-o://26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f" gracePeriod=2 Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.897917 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.965497 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities\") pod \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.965558 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content\") pod \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.965630 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zngt5\" (UniqueName: \"kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5\") pod \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\" (UID: \"69a61b68-70d0-4f42-ad31-0d28a8289eb2\") " Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.967777 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities" (OuterVolumeSpecName: "utilities") pod "69a61b68-70d0-4f42-ad31-0d28a8289eb2" (UID: "69a61b68-70d0-4f42-ad31-0d28a8289eb2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:07:42 crc kubenswrapper[4788]: I1211 10:07:42.972541 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5" (OuterVolumeSpecName: "kube-api-access-zngt5") pod "69a61b68-70d0-4f42-ad31-0d28a8289eb2" (UID: "69a61b68-70d0-4f42-ad31-0d28a8289eb2"). InnerVolumeSpecName "kube-api-access-zngt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.068413 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.068444 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zngt5\" (UniqueName: \"kubernetes.io/projected/69a61b68-70d0-4f42-ad31-0d28a8289eb2-kube-api-access-zngt5\") on node \"crc\" DevicePath \"\"" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.081498 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69a61b68-70d0-4f42-ad31-0d28a8289eb2" (UID: "69a61b68-70d0-4f42-ad31-0d28a8289eb2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.170526 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69a61b68-70d0-4f42-ad31-0d28a8289eb2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.411515 4788 generic.go:334] "Generic (PLEG): container finished" podID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerID="26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f" exitCode=0 Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.411633 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerDied","Data":"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f"} Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.411670 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j9dl5" event={"ID":"69a61b68-70d0-4f42-ad31-0d28a8289eb2","Type":"ContainerDied","Data":"69339f096d98f4832cd5cb1e9eb4547d62109d673116083c7b74bb068277eb09"} Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.411680 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j9dl5" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.411688 4788 scope.go:117] "RemoveContainer" containerID="26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.438647 4788 scope.go:117] "RemoveContainer" containerID="fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.455999 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.466365 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j9dl5"] Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.481573 4788 scope.go:117] "RemoveContainer" containerID="3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.521559 4788 scope.go:117] "RemoveContainer" containerID="26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f" Dec 11 10:07:43 crc kubenswrapper[4788]: E1211 10:07:43.522210 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f\": container with ID starting with 26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f not found: ID does not exist" containerID="26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.522276 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f"} err="failed to get container status \"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f\": rpc error: code = NotFound desc = could not find container \"26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f\": container with ID starting with 26e6816bc4cc8c00a319cb0bfc05510abff7ba78735815a185b3ac336124ba1f not found: ID does not exist" Dec 11 10:07:43 crc 
kubenswrapper[4788]: I1211 10:07:43.522309 4788 scope.go:117] "RemoveContainer" containerID="fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161" Dec 11 10:07:43 crc kubenswrapper[4788]: E1211 10:07:43.522742 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161\": container with ID starting with fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161 not found: ID does not exist" containerID="fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.522786 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161"} err="failed to get container status \"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161\": rpc error: code = NotFound desc = could not find container \"fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161\": container with ID starting with fb3fcbbe28e51537654ce0297dac7026ab83d0bfefbabb843cd3c56586d70161 not found: ID does not exist" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.522812 4788 scope.go:117] "RemoveContainer" containerID="3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8" Dec 11 10:07:43 crc kubenswrapper[4788]: E1211 10:07:43.523077 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8\": container with ID starting with 3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8 not found: ID does not exist" containerID="3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8" Dec 11 10:07:43 crc kubenswrapper[4788]: I1211 10:07:43.523119 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8"} err="failed to get container status \"3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8\": rpc error: code = NotFound desc = could not find container \"3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8\": container with ID starting with 3511cdb126ab779b7a6b9e56890fd553a832042aca330d5ac939784b620edfc8 not found: ID does not exist" Dec 11 10:07:44 crc kubenswrapper[4788]: I1211 10:07:44.506981 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" path="/var/lib/kubelet/pods/69a61b68-70d0-4f42-ad31-0d28a8289eb2/volumes" Dec 11 10:07:51 crc kubenswrapper[4788]: I1211 10:07:51.369917 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:07:51 crc kubenswrapper[4788]: I1211 10:07:51.371606 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:07:51 crc kubenswrapper[4788]: I1211 10:07:51.371837 4788 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:07:51 crc kubenswrapper[4788]: I1211 10:07:51.372869 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:07:51 crc kubenswrapper[4788]: I1211 10:07:51.373105 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b" gracePeriod=600 Dec 11 10:07:52 crc kubenswrapper[4788]: I1211 10:07:52.500081 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b" exitCode=0 Dec 11 10:07:52 crc kubenswrapper[4788]: I1211 10:07:52.513206 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b"} Dec 11 10:07:52 crc kubenswrapper[4788]: I1211 10:07:52.513552 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d"} Dec 11 10:07:52 crc kubenswrapper[4788]: I1211 10:07:52.513649 4788 scope.go:117] "RemoveContainer" containerID="bd70cac9bf07c82af594eedc6283f4594fb93cef093118ac2760589713db2cc3" Dec 11 10:09:06 crc kubenswrapper[4788]: I1211 10:09:06.339321 4788 generic.go:334] "Generic (PLEG): container finished" podID="2d2d45a1-244d-4e91-be2b-db12eb484a25" containerID="7e92b1c52d3b45d3275554512b1991d35d5c245696a55fda1c6e32d1ea83e266" exitCode=0 Dec 11 10:09:06 crc kubenswrapper[4788]: I1211 10:09:06.339472 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" event={"ID":"2d2d45a1-244d-4e91-be2b-db12eb484a25","Type":"ContainerDied","Data":"7e92b1c52d3b45d3275554512b1991d35d5c245696a55fda1c6e32d1ea83e266"} Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.827984 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.920473 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921430 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921564 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921591 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921612 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921747 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h86w\" (UniqueName: \"kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921773 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.921854 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle\") pod \"2d2d45a1-244d-4e91-be2b-db12eb484a25\" (UID: \"2d2d45a1-244d-4e91-be2b-db12eb484a25\") " Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.928738 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.933477 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w" (OuterVolumeSpecName: "kube-api-access-9h86w") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "kube-api-access-9h86w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.958038 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory" (OuterVolumeSpecName: "inventory") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.964770 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.964804 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.964897 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.965645 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.968064 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:07 crc kubenswrapper[4788]: I1211 10:09:07.968252 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "2d2d45a1-244d-4e91-be2b-db12eb484a25" (UID: "2d2d45a1-244d-4e91-be2b-db12eb484a25"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025379 4788 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025435 4788 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025447 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025458 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025467 4788 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025499 4788 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025509 4788 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025518 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h86w\" (UniqueName: \"kubernetes.io/projected/2d2d45a1-244d-4e91-be2b-db12eb484a25-kube-api-access-9h86w\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.025526 4788 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/2d2d45a1-244d-4e91-be2b-db12eb484a25-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.365686 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" event={"ID":"2d2d45a1-244d-4e91-be2b-db12eb484a25","Type":"ContainerDied","Data":"5dfa5989cad36de819b9d9c1ff517cb18639ef88e90be062d4855dc00e8bc796"} Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.366030 4788 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="5dfa5989cad36de819b9d9c1ff517cb18639ef88e90be062d4855dc00e8bc796" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.365814 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-5mm6f" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.477743 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7"] Dec 11 10:09:08 crc kubenswrapper[4788]: E1211 10:09:08.478258 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="registry-server" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478277 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="registry-server" Dec 11 10:09:08 crc kubenswrapper[4788]: E1211 10:09:08.478288 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="extract-content" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478294 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="extract-content" Dec 11 10:09:08 crc kubenswrapper[4788]: E1211 10:09:08.478322 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2d45a1-244d-4e91-be2b-db12eb484a25" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478329 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2d45a1-244d-4e91-be2b-db12eb484a25" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 11 10:09:08 crc kubenswrapper[4788]: E1211 10:09:08.478339 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="extract-utilities" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478345 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="extract-utilities" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478560 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="69a61b68-70d0-4f42-ad31-0d28a8289eb2" containerName="registry-server" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.478572 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2d45a1-244d-4e91-be2b-db12eb484a25" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.479352 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.481553 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fsk8z" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.481854 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.482128 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.483511 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.483830 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.508136 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7"] Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.637380 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.637940 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.638891 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.639171 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.639838 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 
11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.640406 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x7hb\" (UniqueName: \"kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.640540 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.749084 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.750026 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x7hb\" (UniqueName: \"kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.750143 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.750905 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.751045 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.751156 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.751361 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.757212 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.757331 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.757812 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.759176 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.764038 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.766897 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.774677 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x7hb\" (UniqueName: \"kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7\" (UID: 
\"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:08 crc kubenswrapper[4788]: I1211 10:09:08.801165 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:09:09 crc kubenswrapper[4788]: I1211 10:09:09.422573 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7"] Dec 11 10:09:10 crc kubenswrapper[4788]: I1211 10:09:10.390381 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" event={"ID":"5395fbbe-5f31-4c60-bee6-09b492d13e36","Type":"ContainerStarted","Data":"e9b4a80dbfcc14808a040f723ea08007ba8191a42550a75a8d276a5917699273"} Dec 11 10:09:10 crc kubenswrapper[4788]: I1211 10:09:10.390966 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" event={"ID":"5395fbbe-5f31-4c60-bee6-09b492d13e36","Type":"ContainerStarted","Data":"0a34fee7ca44d5c58983b99c8280a8e29e7464f2515924648015ca1149669e20"} Dec 11 10:09:10 crc kubenswrapper[4788]: I1211 10:09:10.421255 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" podStartSLOduration=2.208491203 podStartE2EDuration="2.421209243s" podCreationTimestamp="2025-12-11 10:09:08 +0000 UTC" firstStartedPulling="2025-12-11 10:09:09.424178539 +0000 UTC m=+2879.494958125" lastFinishedPulling="2025-12-11 10:09:09.636896579 +0000 UTC m=+2879.707676165" observedRunningTime="2025-12-11 10:09:10.412668856 +0000 UTC m=+2880.483448542" watchObservedRunningTime="2025-12-11 10:09:10.421209243 +0000 UTC m=+2880.491988829" Dec 11 10:10:21 crc kubenswrapper[4788]: I1211 10:10:21.369656 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:10:21 crc kubenswrapper[4788]: I1211 10:10:21.370311 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:10:51 crc kubenswrapper[4788]: I1211 10:10:51.370878 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:10:51 crc kubenswrapper[4788]: I1211 10:10:51.371589 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 10:11:16.758316 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7xsxb"] Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 
10:11:16.766501 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 10:11:16.782740 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7xsxb"] Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 10:11:16.925953 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-utilities\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 10:11:16.926102 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42vtv\" (UniqueName: \"kubernetes.io/projected/b52edb02-716e-41ca-9bdb-474fbea1e14f-kube-api-access-42vtv\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:16 crc kubenswrapper[4788]: I1211 10:11:16.926267 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-catalog-content\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.028411 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-utilities\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.028844 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42vtv\" (UniqueName: \"kubernetes.io/projected/b52edb02-716e-41ca-9bdb-474fbea1e14f-kube-api-access-42vtv\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.028894 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-utilities\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.029127 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-catalog-content\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.029521 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52edb02-716e-41ca-9bdb-474fbea1e14f-catalog-content\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc 
kubenswrapper[4788]: I1211 10:11:17.052588 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42vtv\" (UniqueName: \"kubernetes.io/projected/b52edb02-716e-41ca-9bdb-474fbea1e14f-kube-api-access-42vtv\") pod \"community-operators-7xsxb\" (UID: \"b52edb02-716e-41ca-9bdb-474fbea1e14f\") " pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.089789 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.637948 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7xsxb"] Dec 11 10:11:17 crc kubenswrapper[4788]: I1211 10:11:17.725449 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xsxb" event={"ID":"b52edb02-716e-41ca-9bdb-474fbea1e14f","Type":"ContainerStarted","Data":"caa63e1aa8f8538a042d4b9d078b3002191d345158d76926441a1fa9075b71a8"} Dec 11 10:11:18 crc kubenswrapper[4788]: I1211 10:11:18.736459 4788 generic.go:334] "Generic (PLEG): container finished" podID="b52edb02-716e-41ca-9bdb-474fbea1e14f" containerID="1d018bc39951048fa079a5c333392ad6d95709292281589598bceac357c01092" exitCode=0 Dec 11 10:11:18 crc kubenswrapper[4788]: I1211 10:11:18.736540 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xsxb" event={"ID":"b52edb02-716e-41ca-9bdb-474fbea1e14f","Type":"ContainerDied","Data":"1d018bc39951048fa079a5c333392ad6d95709292281589598bceac357c01092"} Dec 11 10:11:18 crc kubenswrapper[4788]: I1211 10:11:18.739267 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.369215 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.370398 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.370582 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.371579 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.371736 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" 
containerID="cri-o://205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" gracePeriod=600 Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.794633 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" exitCode=0 Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.795024 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d"} Dec 11 10:11:21 crc kubenswrapper[4788]: I1211 10:11:21.795244 4788 scope.go:117] "RemoveContainer" containerID="cc8da15a4fca74df4781babb161f153059f0ac71c06e9a64dfa2479b892d0a8b" Dec 11 10:11:22 crc kubenswrapper[4788]: E1211 10:11:22.858857 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:11:23 crc kubenswrapper[4788]: I1211 10:11:23.817931 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:11:23 crc kubenswrapper[4788]: E1211 10:11:23.818474 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:11:23 crc kubenswrapper[4788]: I1211 10:11:23.820293 4788 generic.go:334] "Generic (PLEG): container finished" podID="b52edb02-716e-41ca-9bdb-474fbea1e14f" containerID="594327638c7470d47e3b8da4d5b8589e2ac78d956722c8b8e11e90441e860684" exitCode=0 Dec 11 10:11:23 crc kubenswrapper[4788]: I1211 10:11:23.820344 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xsxb" event={"ID":"b52edb02-716e-41ca-9bdb-474fbea1e14f","Type":"ContainerDied","Data":"594327638c7470d47e3b8da4d5b8589e2ac78d956722c8b8e11e90441e860684"} Dec 11 10:11:26 crc kubenswrapper[4788]: I1211 10:11:26.853819 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7xsxb" event={"ID":"b52edb02-716e-41ca-9bdb-474fbea1e14f","Type":"ContainerStarted","Data":"876c2e81f729e62cd298dc31cba8b8af748290606b7d41216a03f67129d2e6db"} Dec 11 10:11:26 crc kubenswrapper[4788]: I1211 10:11:26.885789 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7xsxb" podStartSLOduration=3.64606897 podStartE2EDuration="10.88575957s" podCreationTimestamp="2025-12-11 10:11:16 +0000 UTC" firstStartedPulling="2025-12-11 10:11:18.738946376 +0000 UTC m=+3008.809725962" lastFinishedPulling="2025-12-11 10:11:25.978636976 +0000 UTC m=+3016.049416562" observedRunningTime="2025-12-11 10:11:26.881478431 +0000 UTC m=+3016.952258017" watchObservedRunningTime="2025-12-11 10:11:26.88575957 +0000 UTC 
m=+3016.956539156" Dec 11 10:11:27 crc kubenswrapper[4788]: I1211 10:11:27.090688 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:27 crc kubenswrapper[4788]: I1211 10:11:27.090767 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:28 crc kubenswrapper[4788]: I1211 10:11:28.265353 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-7xsxb" podUID="b52edb02-716e-41ca-9bdb-474fbea1e14f" containerName="registry-server" probeResult="failure" output=< Dec 11 10:11:28 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 10:11:28 crc kubenswrapper[4788]: > Dec 11 10:11:35 crc kubenswrapper[4788]: I1211 10:11:35.496769 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:11:35 crc kubenswrapper[4788]: E1211 10:11:35.500107 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.146057 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.205805 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7xsxb" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.274554 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7xsxb"] Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.387045 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.387383 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h9g2p" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="registry-server" containerID="cri-o://430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900" gracePeriod=2 Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.919260 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.960009 4788 generic.go:334] "Generic (PLEG): container finished" podID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerID="430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900" exitCode=0 Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.960576 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9g2p" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.961063 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerDied","Data":"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900"} Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.961098 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9g2p" event={"ID":"e4f094f5-cc0c-4f3f-81af-36f4887fae9c","Type":"ContainerDied","Data":"da5d0f059d5368a027fbda7aec32251273290695f3561537229a666095176205"} Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.961117 4788 scope.go:117] "RemoveContainer" containerID="430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900" Dec 11 10:11:37 crc kubenswrapper[4788]: I1211 10:11:37.992504 4788 scope.go:117] "RemoveContainer" containerID="373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.040973 4788 scope.go:117] "RemoveContainer" containerID="b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.048329 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gv5j9\" (UniqueName: \"kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9\") pod \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.048618 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities\") pod \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.048693 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content\") pod \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\" (UID: \"e4f094f5-cc0c-4f3f-81af-36f4887fae9c\") " Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.049673 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities" (OuterVolumeSpecName: "utilities") pod "e4f094f5-cc0c-4f3f-81af-36f4887fae9c" (UID: "e4f094f5-cc0c-4f3f-81af-36f4887fae9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.059088 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9" (OuterVolumeSpecName: "kube-api-access-gv5j9") pod "e4f094f5-cc0c-4f3f-81af-36f4887fae9c" (UID: "e4f094f5-cc0c-4f3f-81af-36f4887fae9c"). InnerVolumeSpecName "kube-api-access-gv5j9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.123065 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4f094f5-cc0c-4f3f-81af-36f4887fae9c" (UID: "e4f094f5-cc0c-4f3f-81af-36f4887fae9c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.132761 4788 scope.go:117] "RemoveContainer" containerID="430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900" Dec 11 10:11:38 crc kubenswrapper[4788]: E1211 10:11:38.133275 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900\": container with ID starting with 430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900 not found: ID does not exist" containerID="430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.133323 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900"} err="failed to get container status \"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900\": rpc error: code = NotFound desc = could not find container \"430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900\": container with ID starting with 430b467cd344ca216fba6021869f8d842defd87760a1960ef19852aa62bde900 not found: ID does not exist" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.133352 4788 scope.go:117] "RemoveContainer" containerID="373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027" Dec 11 10:11:38 crc kubenswrapper[4788]: E1211 10:11:38.133821 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027\": container with ID starting with 373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027 not found: ID does not exist" containerID="373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.133937 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027"} err="failed to get container status \"373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027\": rpc error: code = NotFound desc = could not find container \"373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027\": container with ID starting with 373c5119de4de7184c01e51b474c59beb7682922ef162ccd46458eb3a5dba027 not found: ID does not exist" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.134019 4788 scope.go:117] "RemoveContainer" containerID="b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b" Dec 11 10:11:38 crc kubenswrapper[4788]: E1211 10:11:38.134382 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b\": container with ID starting with b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b not found: ID does not exist" 
containerID="b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.134440 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b"} err="failed to get container status \"b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b\": rpc error: code = NotFound desc = could not find container \"b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b\": container with ID starting with b7b50e0df525d3f1c50f5fdf7beac4bee7104b85c2982a13f94eb9a5dd9d045b not found: ID does not exist" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.150964 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gv5j9\" (UniqueName: \"kubernetes.io/projected/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-kube-api-access-gv5j9\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.151003 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.151015 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4f094f5-cc0c-4f3f-81af-36f4887fae9c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.298475 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.307829 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h9g2p"] Dec 11 10:11:38 crc kubenswrapper[4788]: I1211 10:11:38.522303 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" path="/var/lib/kubelet/pods/e4f094f5-cc0c-4f3f-81af-36f4887fae9c/volumes" Dec 11 10:11:48 crc kubenswrapper[4788]: I1211 10:11:48.058136 4788 generic.go:334] "Generic (PLEG): container finished" podID="5395fbbe-5f31-4c60-bee6-09b492d13e36" containerID="e9b4a80dbfcc14808a040f723ea08007ba8191a42550a75a8d276a5917699273" exitCode=0 Dec 11 10:11:48 crc kubenswrapper[4788]: I1211 10:11:48.058186 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" event={"ID":"5395fbbe-5f31-4c60-bee6-09b492d13e36","Type":"ContainerDied","Data":"e9b4a80dbfcc14808a040f723ea08007ba8191a42550a75a8d276a5917699273"} Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.522963 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.592774 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x7hb\" (UniqueName: \"kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.592892 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.592946 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.592976 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.593018 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.593120 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.593222 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0\") pod \"5395fbbe-5f31-4c60-bee6-09b492d13e36\" (UID: \"5395fbbe-5f31-4c60-bee6-09b492d13e36\") " Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.600482 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.600645 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb" (OuterVolumeSpecName: "kube-api-access-7x7hb") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). 
InnerVolumeSpecName "kube-api-access-7x7hb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.626408 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.626777 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.628251 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.629407 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.635450 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory" (OuterVolumeSpecName: "inventory") pod "5395fbbe-5f31-4c60-bee6-09b492d13e36" (UID: "5395fbbe-5f31-4c60-bee6-09b492d13e36"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697360 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697694 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x7hb\" (UniqueName: \"kubernetes.io/projected/5395fbbe-5f31-4c60-bee6-09b492d13e36-kube-api-access-7x7hb\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697707 4788 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697724 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697734 4788 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-inventory\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697743 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:49 crc kubenswrapper[4788]: I1211 10:11:49.697752 4788 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/5395fbbe-5f31-4c60-bee6-09b492d13e36-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 11 10:11:50 crc kubenswrapper[4788]: I1211 10:11:50.080839 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" event={"ID":"5395fbbe-5f31-4c60-bee6-09b492d13e36","Type":"ContainerDied","Data":"0a34fee7ca44d5c58983b99c8280a8e29e7464f2515924648015ca1149669e20"} Dec 11 10:11:50 crc kubenswrapper[4788]: I1211 10:11:50.080896 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a34fee7ca44d5c58983b99c8280a8e29e7464f2515924648015ca1149669e20" Dec 11 10:11:50 crc kubenswrapper[4788]: I1211 10:11:50.080927 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7" Dec 11 10:11:50 crc kubenswrapper[4788]: I1211 10:11:50.502668 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:11:50 crc kubenswrapper[4788]: E1211 10:11:50.502998 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:12:05 crc kubenswrapper[4788]: I1211 10:12:05.496188 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:12:05 crc kubenswrapper[4788]: E1211 10:12:05.497097 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:12:19 crc kubenswrapper[4788]: I1211 10:12:19.497616 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:12:19 crc kubenswrapper[4788]: E1211 10:12:19.499729 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:12:31 crc kubenswrapper[4788]: I1211 10:12:31.503544 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:12:31 crc kubenswrapper[4788]: E1211 10:12:31.505077 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:12:45 crc kubenswrapper[4788]: I1211 10:12:45.497677 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:12:45 crc kubenswrapper[4788]: E1211 10:12:45.498657 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.439524 4788 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/tempest-tests-tempest"] Dec 11 10:12:48 crc kubenswrapper[4788]: E1211 10:12:48.440504 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5395fbbe-5f31-4c60-bee6-09b492d13e36" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440521 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5395fbbe-5f31-4c60-bee6-09b492d13e36" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 11 10:12:48 crc kubenswrapper[4788]: E1211 10:12:48.440538 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="extract-utilities" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440544 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="extract-utilities" Dec 11 10:12:48 crc kubenswrapper[4788]: E1211 10:12:48.440559 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="extract-content" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440566 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="extract-content" Dec 11 10:12:48 crc kubenswrapper[4788]: E1211 10:12:48.440591 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="registry-server" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440598 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="registry-server" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440791 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5395fbbe-5f31-4c60-bee6-09b492d13e36" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.440806 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4f094f5-cc0c-4f3f-81af-36f4887fae9c" containerName="registry-server" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.441637 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.446871 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.446984 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-c9zcn" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.447106 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.447491 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.450071 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.534024 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.534502 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.534848 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.636913 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637000 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637049 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637140 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637192 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637251 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637410 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637508 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pft9\" (UniqueName: \"kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.637552 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.639478 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.639709 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.643640 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.739405 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary\") 
pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.739869 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.740042 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.740188 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.740370 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pft9\" (UniqueName: \"kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.740471 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.740575 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.739940 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.741472 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.746295 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc 
kubenswrapper[4788]: I1211 10:12:48.751031 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.760025 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pft9\" (UniqueName: \"kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:48 crc kubenswrapper[4788]: I1211 10:12:48.775295 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " pod="openstack/tempest-tests-tempest" Dec 11 10:12:49 crc kubenswrapper[4788]: I1211 10:12:49.073755 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 11 10:12:49 crc kubenswrapper[4788]: I1211 10:12:49.570204 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 11 10:12:49 crc kubenswrapper[4788]: I1211 10:12:49.763798 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cf588e9-b8ff-4862-9a71-8e6805d89b2a","Type":"ContainerStarted","Data":"33d5f2dc4a61cdde8022fd20dcbeac0d56fea932bcdb38cd0d20b1d78befeccc"} Dec 11 10:12:58 crc kubenswrapper[4788]: I1211 10:12:58.496424 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:12:58 crc kubenswrapper[4788]: E1211 10:12:58.497607 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:13:09 crc kubenswrapper[4788]: I1211 10:13:09.495623 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:13:09 crc kubenswrapper[4788]: E1211 10:13:09.497087 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:13:21 crc kubenswrapper[4788]: I1211 10:13:21.496243 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:13:21 crc kubenswrapper[4788]: E1211 10:13:21.497254 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:13:24 crc kubenswrapper[4788]: E1211 10:13:24.024255 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 11 10:13:24 crc kubenswrapper[4788]: E1211 10:13:24.024913 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9pft9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,Resi
zePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(5cf588e9-b8ff-4862-9a71-8e6805d89b2a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 10:13:24 crc kubenswrapper[4788]: E1211 10:13:24.026395 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" Dec 11 10:13:24 crc kubenswrapper[4788]: E1211 10:13:24.211193 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" Dec 11 10:13:35 crc kubenswrapper[4788]: I1211 10:13:35.495630 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:13:35 crc kubenswrapper[4788]: E1211 10:13:35.496711 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:13:37 crc kubenswrapper[4788]: I1211 10:13:37.273711 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 11 10:13:38 crc kubenswrapper[4788]: I1211 10:13:38.350220 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cf588e9-b8ff-4862-9a71-8e6805d89b2a","Type":"ContainerStarted","Data":"30cda3f5b59442cd24f40f7c7381efe9cc2c6403f09a080726bcd1073c9a4071"} Dec 11 10:13:38 crc kubenswrapper[4788]: I1211 10:13:38.374270 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.67904099 podStartE2EDuration="51.374216784s" podCreationTimestamp="2025-12-11 10:12:47 +0000 UTC" firstStartedPulling="2025-12-11 10:12:49.574273742 +0000 UTC m=+3099.645053328" lastFinishedPulling="2025-12-11 10:13:37.269449536 +0000 UTC m=+3147.340229122" observedRunningTime="2025-12-11 10:13:38.36851106 +0000 UTC m=+3148.439290666" watchObservedRunningTime="2025-12-11 10:13:38.374216784 +0000 UTC m=+3148.444996370" Dec 11 10:13:48 crc kubenswrapper[4788]: I1211 10:13:48.496710 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:13:48 crc kubenswrapper[4788]: E1211 10:13:48.497641 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:14:01 crc kubenswrapper[4788]: I1211 10:14:01.495878 4788 
scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:14:01 crc kubenswrapper[4788]: E1211 10:14:01.497862 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:14:15 crc kubenswrapper[4788]: I1211 10:14:15.503683 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:14:15 crc kubenswrapper[4788]: E1211 10:14:15.505117 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:14:28 crc kubenswrapper[4788]: I1211 10:14:28.501514 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:14:28 crc kubenswrapper[4788]: E1211 10:14:28.502373 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:14:40 crc kubenswrapper[4788]: I1211 10:14:40.507253 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:14:40 crc kubenswrapper[4788]: E1211 10:14:40.508253 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:14:51 crc kubenswrapper[4788]: I1211 10:14:51.495672 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:14:51 crc kubenswrapper[4788]: E1211 10:14:51.496665 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.158884 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt"] Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.161719 4788 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.167000 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.167000 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.174510 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt"] Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.258662 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.259033 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.259621 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzw5n\" (UniqueName: \"kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.361985 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.362203 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzw5n\" (UniqueName: \"kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.362367 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.363431 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.380484 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.383313 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzw5n\" (UniqueName: \"kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n\") pod \"collect-profiles-29424135-l8hwt\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:00 crc kubenswrapper[4788]: I1211 10:15:00.496676 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:01 crc kubenswrapper[4788]: I1211 10:15:01.023343 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt"] Dec 11 10:15:01 crc kubenswrapper[4788]: I1211 10:15:01.142418 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" event={"ID":"cb683a84-06d9-4ed3-806a-b6cd4b920155","Type":"ContainerStarted","Data":"15638e726aba271ab8b547ead59f08d1a8e59ad18ef22ff0c12ae3f6556b9a0d"} Dec 11 10:15:02 crc kubenswrapper[4788]: I1211 10:15:02.153080 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" event={"ID":"cb683a84-06d9-4ed3-806a-b6cd4b920155","Type":"ContainerStarted","Data":"090c53c95f09d13a07a6a79d35939385e52468769eced143fd69664d7cb0ceed"} Dec 11 10:15:02 crc kubenswrapper[4788]: I1211 10:15:02.177889 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" podStartSLOduration=2.17786621 podStartE2EDuration="2.17786621s" podCreationTimestamp="2025-12-11 10:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:15:02.171434208 +0000 UTC m=+3232.242213804" watchObservedRunningTime="2025-12-11 10:15:02.17786621 +0000 UTC m=+3232.248645796" Dec 11 10:15:02 crc kubenswrapper[4788]: I1211 10:15:02.496672 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:15:02 crc kubenswrapper[4788]: E1211 10:15:02.496993 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:15:03 crc kubenswrapper[4788]: I1211 10:15:03.184178 4788 generic.go:334] "Generic (PLEG): 
container finished" podID="cb683a84-06d9-4ed3-806a-b6cd4b920155" containerID="090c53c95f09d13a07a6a79d35939385e52468769eced143fd69664d7cb0ceed" exitCode=0 Dec 11 10:15:03 crc kubenswrapper[4788]: I1211 10:15:03.184292 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" event={"ID":"cb683a84-06d9-4ed3-806a-b6cd4b920155","Type":"ContainerDied","Data":"090c53c95f09d13a07a6a79d35939385e52468769eced143fd69664d7cb0ceed"} Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.637358 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.760802 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume\") pod \"cb683a84-06d9-4ed3-806a-b6cd4b920155\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.760927 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume\") pod \"cb683a84-06d9-4ed3-806a-b6cd4b920155\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.760968 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzw5n\" (UniqueName: \"kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n\") pod \"cb683a84-06d9-4ed3-806a-b6cd4b920155\" (UID: \"cb683a84-06d9-4ed3-806a-b6cd4b920155\") " Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.762200 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume" (OuterVolumeSpecName: "config-volume") pod "cb683a84-06d9-4ed3-806a-b6cd4b920155" (UID: "cb683a84-06d9-4ed3-806a-b6cd4b920155"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.768604 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n" (OuterVolumeSpecName: "kube-api-access-rzw5n") pod "cb683a84-06d9-4ed3-806a-b6cd4b920155" (UID: "cb683a84-06d9-4ed3-806a-b6cd4b920155"). InnerVolumeSpecName "kube-api-access-rzw5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.771449 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cb683a84-06d9-4ed3-806a-b6cd4b920155" (UID: "cb683a84-06d9-4ed3-806a-b6cd4b920155"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.863912 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb683a84-06d9-4ed3-806a-b6cd4b920155-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.864296 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb683a84-06d9-4ed3-806a-b6cd4b920155-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:04 crc kubenswrapper[4788]: I1211 10:15:04.864313 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzw5n\" (UniqueName: \"kubernetes.io/projected/cb683a84-06d9-4ed3-806a-b6cd4b920155-kube-api-access-rzw5n\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:05 crc kubenswrapper[4788]: I1211 10:15:05.203801 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" event={"ID":"cb683a84-06d9-4ed3-806a-b6cd4b920155","Type":"ContainerDied","Data":"15638e726aba271ab8b547ead59f08d1a8e59ad18ef22ff0c12ae3f6556b9a0d"} Dec 11 10:15:05 crc kubenswrapper[4788]: I1211 10:15:05.203852 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15638e726aba271ab8b547ead59f08d1a8e59ad18ef22ff0c12ae3f6556b9a0d" Dec 11 10:15:05 crc kubenswrapper[4788]: I1211 10:15:05.203864 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424135-l8hwt" Dec 11 10:15:05 crc kubenswrapper[4788]: I1211 10:15:05.253381 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj"] Dec 11 10:15:05 crc kubenswrapper[4788]: I1211 10:15:05.264831 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424090-p2svj"] Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.162154 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:15:06 crc kubenswrapper[4788]: E1211 10:15:06.162763 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb683a84-06d9-4ed3-806a-b6cd4b920155" containerName="collect-profiles" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.162786 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb683a84-06d9-4ed3-806a-b6cd4b920155" containerName="collect-profiles" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.163049 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb683a84-06d9-4ed3-806a-b6cd4b920155" containerName="collect-profiles" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.166165 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.175068 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.297310 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fmkf\" (UniqueName: \"kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.299454 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.299553 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.401815 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.401892 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.402018 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fmkf\" (UniqueName: \"kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.402812 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.402847 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.424513 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2fmkf\" (UniqueName: \"kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf\") pod \"certified-operators-tmjrw\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.501037 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:06 crc kubenswrapper[4788]: I1211 10:15:06.524686 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4f5bdd-8029-4aa6-ac75-0160d3250669" path="/var/lib/kubelet/pods/1f4f5bdd-8029-4aa6-ac75-0160d3250669/volumes" Dec 11 10:15:07 crc kubenswrapper[4788]: I1211 10:15:07.071372 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:15:07 crc kubenswrapper[4788]: I1211 10:15:07.230607 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerStarted","Data":"6756e0ca846831ee4d96dc64b4ddaf0a23c1c82c3267dc2740b0e025f0e1ef0e"} Dec 11 10:15:08 crc kubenswrapper[4788]: I1211 10:15:08.242918 4788 generic.go:334] "Generic (PLEG): container finished" podID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerID="3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06" exitCode=0 Dec 11 10:15:08 crc kubenswrapper[4788]: I1211 10:15:08.242994 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerDied","Data":"3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06"} Dec 11 10:15:16 crc kubenswrapper[4788]: I1211 10:15:16.495683 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:15:16 crc kubenswrapper[4788]: E1211 10:15:16.497795 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:15:21 crc kubenswrapper[4788]: E1211 10:15:21.641151 4788 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 10:15:21 crc kubenswrapper[4788]: E1211 10:15:21.641881 4788 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2fmkf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-tmjrw_openshift-marketplace(59256e55-52ad-435a-8996-30d1ec1a0e18): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 10:15:21 crc kubenswrapper[4788]: E1211 10:15:21.643169 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-tmjrw" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" Dec 11 10:15:22 crc kubenswrapper[4788]: E1211 10:15:22.391904 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-tmjrw" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" Dec 11 10:15:29 crc kubenswrapper[4788]: I1211 10:15:29.496178 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:15:29 crc kubenswrapper[4788]: E1211 10:15:29.497181 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:15:38 crc kubenswrapper[4788]: I1211 10:15:38.569515 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerStarted","Data":"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c"} Dec 11 10:15:39 crc kubenswrapper[4788]: I1211 10:15:39.582817 4788 generic.go:334] "Generic (PLEG): container finished" podID="59256e55-52ad-435a-8996-30d1ec1a0e18" 
containerID="54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c" exitCode=0 Dec 11 10:15:39 crc kubenswrapper[4788]: I1211 10:15:39.582921 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerDied","Data":"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c"} Dec 11 10:15:40 crc kubenswrapper[4788]: I1211 10:15:40.596536 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerStarted","Data":"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f"} Dec 11 10:15:40 crc kubenswrapper[4788]: I1211 10:15:40.628386 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tmjrw" podStartSLOduration=2.546783594 podStartE2EDuration="34.628361878s" podCreationTimestamp="2025-12-11 10:15:06 +0000 UTC" firstStartedPulling="2025-12-11 10:15:08.24624348 +0000 UTC m=+3238.317023066" lastFinishedPulling="2025-12-11 10:15:40.327821764 +0000 UTC m=+3270.398601350" observedRunningTime="2025-12-11 10:15:40.616904889 +0000 UTC m=+3270.687684475" watchObservedRunningTime="2025-12-11 10:15:40.628361878 +0000 UTC m=+3270.699141464" Dec 11 10:15:41 crc kubenswrapper[4788]: I1211 10:15:41.496874 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:15:41 crc kubenswrapper[4788]: E1211 10:15:41.497705 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.508923 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.509588 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.553180 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.703115 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.775508 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.821803 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 10:15:46 crc kubenswrapper[4788]: I1211 10:15:46.822126 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rf478" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="registry-server" containerID="cri-o://20b80b8bcd33abb3f0d53447d58c64b096264d67adf657bbb0f694333160bc41" gracePeriod=2 Dec 11 10:15:47 crc 
kubenswrapper[4788]: I1211 10:15:47.100413 4788 scope.go:117] "RemoveContainer" containerID="f2b71bfe64f0904d44664492c2564a573ff1b84e56c1bc267e70ae3451537fb4" Dec 11 10:15:47 crc kubenswrapper[4788]: I1211 10:15:47.708769 4788 generic.go:334] "Generic (PLEG): container finished" podID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerID="20b80b8bcd33abb3f0d53447d58c64b096264d67adf657bbb0f694333160bc41" exitCode=0 Dec 11 10:15:47 crc kubenswrapper[4788]: I1211 10:15:47.709677 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerDied","Data":"20b80b8bcd33abb3f0d53447d58c64b096264d67adf657bbb0f694333160bc41"} Dec 11 10:15:47 crc kubenswrapper[4788]: I1211 10:15:47.940149 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf478" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.045854 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities\") pod \"5aa6985d-a538-4476-8aa5-38ccb947cb50\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.046077 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content\") pod \"5aa6985d-a538-4476-8aa5-38ccb947cb50\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.046143 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wfhj\" (UniqueName: \"kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj\") pod \"5aa6985d-a538-4476-8aa5-38ccb947cb50\" (UID: \"5aa6985d-a538-4476-8aa5-38ccb947cb50\") " Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.046741 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities" (OuterVolumeSpecName: "utilities") pod "5aa6985d-a538-4476-8aa5-38ccb947cb50" (UID: "5aa6985d-a538-4476-8aa5-38ccb947cb50"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.061603 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj" (OuterVolumeSpecName: "kube-api-access-9wfhj") pod "5aa6985d-a538-4476-8aa5-38ccb947cb50" (UID: "5aa6985d-a538-4476-8aa5-38ccb947cb50"). InnerVolumeSpecName "kube-api-access-9wfhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.102709 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5aa6985d-a538-4476-8aa5-38ccb947cb50" (UID: "5aa6985d-a538-4476-8aa5-38ccb947cb50"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.148794 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.148844 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5aa6985d-a538-4476-8aa5-38ccb947cb50-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.148863 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wfhj\" (UniqueName: \"kubernetes.io/projected/5aa6985d-a538-4476-8aa5-38ccb947cb50-kube-api-access-9wfhj\") on node \"crc\" DevicePath \"\"" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.722053 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rf478" event={"ID":"5aa6985d-a538-4476-8aa5-38ccb947cb50","Type":"ContainerDied","Data":"3196e3828f94ec3a3fff29f55f0c90116d3e3c41558b0a04a39ccf6a256bc3d5"} Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.722134 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rf478" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.722422 4788 scope.go:117] "RemoveContainer" containerID="20b80b8bcd33abb3f0d53447d58c64b096264d67adf657bbb0f694333160bc41" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.756881 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.766008 4788 scope.go:117] "RemoveContainer" containerID="1e3ebdd4464cadb2dd9277be12572b50ef79c44bc51ecfeb251b615395216b1b" Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.767410 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rf478"] Dec 11 10:15:48 crc kubenswrapper[4788]: I1211 10:15:48.793006 4788 scope.go:117] "RemoveContainer" containerID="3a4bf6f8bbe14018ea73861e1f1997316585c585a2c7b3897960db6ba64f4bbc" Dec 11 10:15:50 crc kubenswrapper[4788]: I1211 10:15:50.510600 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" path="/var/lib/kubelet/pods/5aa6985d-a538-4476-8aa5-38ccb947cb50/volumes" Dec 11 10:15:56 crc kubenswrapper[4788]: I1211 10:15:56.496868 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:15:56 crc kubenswrapper[4788]: E1211 10:15:56.497701 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:16:10 crc kubenswrapper[4788]: I1211 10:16:10.503708 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:16:10 crc kubenswrapper[4788]: E1211 10:16:10.506273 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:16:23 crc kubenswrapper[4788]: I1211 10:16:23.496576 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:16:24 crc kubenswrapper[4788]: I1211 10:16:24.127130 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d"} Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.108955 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:09 crc kubenswrapper[4788]: E1211 10:18:09.110071 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="extract-utilities" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.110092 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="extract-utilities" Dec 11 10:18:09 crc kubenswrapper[4788]: E1211 10:18:09.110120 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="registry-server" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.110129 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="registry-server" Dec 11 10:18:09 crc kubenswrapper[4788]: E1211 10:18:09.110148 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="extract-content" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.110155 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="extract-content" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.110664 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aa6985d-a538-4476-8aa5-38ccb947cb50" containerName="registry-server" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.114135 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.168765 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.223738 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.223795 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.223892 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25gt8\" (UniqueName: \"kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.329812 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25gt8\" (UniqueName: \"kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.330326 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.330520 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.331227 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.331691 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.353157 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-25gt8\" (UniqueName: \"kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8\") pod \"redhat-operators-shvzz\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.463941 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:09 crc kubenswrapper[4788]: I1211 10:18:09.833631 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:10 crc kubenswrapper[4788]: I1211 10:18:10.240151 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerStarted","Data":"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a"} Dec 11 10:18:10 crc kubenswrapper[4788]: I1211 10:18:10.240216 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerStarted","Data":"f98d593641e7498c531ae1f1f4a3a93ea6a822e83ab5b9b6d72db6d98044aa03"} Dec 11 10:18:11 crc kubenswrapper[4788]: I1211 10:18:11.252797 4788 generic.go:334] "Generic (PLEG): container finished" podID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerID="c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a" exitCode=0 Dec 11 10:18:11 crc kubenswrapper[4788]: I1211 10:18:11.252882 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerDied","Data":"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a"} Dec 11 10:18:11 crc kubenswrapper[4788]: I1211 10:18:11.257393 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:18:13 crc kubenswrapper[4788]: I1211 10:18:13.280084 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerStarted","Data":"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106"} Dec 11 10:18:17 crc kubenswrapper[4788]: I1211 10:18:17.315880 4788 generic.go:334] "Generic (PLEG): container finished" podID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerID="808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106" exitCode=0 Dec 11 10:18:17 crc kubenswrapper[4788]: I1211 10:18:17.315984 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerDied","Data":"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106"} Dec 11 10:18:20 crc kubenswrapper[4788]: I1211 10:18:20.345722 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerStarted","Data":"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e"} Dec 11 10:18:20 crc kubenswrapper[4788]: I1211 10:18:20.372007 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-shvzz" podStartSLOduration=2.761208671 podStartE2EDuration="11.371982845s" podCreationTimestamp="2025-12-11 10:18:09 +0000 UTC" firstStartedPulling="2025-12-11 
10:18:11.257066559 +0000 UTC m=+3421.327846145" lastFinishedPulling="2025-12-11 10:18:19.867840733 +0000 UTC m=+3429.938620319" observedRunningTime="2025-12-11 10:18:20.366551508 +0000 UTC m=+3430.437331094" watchObservedRunningTime="2025-12-11 10:18:20.371982845 +0000 UTC m=+3430.442762431" Dec 11 10:18:29 crc kubenswrapper[4788]: I1211 10:18:29.464655 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:29 crc kubenswrapper[4788]: I1211 10:18:29.465252 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:29 crc kubenswrapper[4788]: I1211 10:18:29.519965 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:30 crc kubenswrapper[4788]: I1211 10:18:30.483059 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:33 crc kubenswrapper[4788]: I1211 10:18:33.557785 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:33 crc kubenswrapper[4788]: I1211 10:18:33.558720 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-shvzz" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="registry-server" containerID="cri-o://fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e" gracePeriod=2 Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.103977 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.304684 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities\") pod \"d5db404c-c3e3-432f-8df7-ea1936db6d10\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.304817 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25gt8\" (UniqueName: \"kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8\") pod \"d5db404c-c3e3-432f-8df7-ea1936db6d10\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.304943 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content\") pod \"d5db404c-c3e3-432f-8df7-ea1936db6d10\" (UID: \"d5db404c-c3e3-432f-8df7-ea1936db6d10\") " Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.305916 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities" (OuterVolumeSpecName: "utilities") pod "d5db404c-c3e3-432f-8df7-ea1936db6d10" (UID: "d5db404c-c3e3-432f-8df7-ea1936db6d10"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.312080 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8" (OuterVolumeSpecName: "kube-api-access-25gt8") pod "d5db404c-c3e3-432f-8df7-ea1936db6d10" (UID: "d5db404c-c3e3-432f-8df7-ea1936db6d10"). InnerVolumeSpecName "kube-api-access-25gt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.410360 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.410415 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25gt8\" (UniqueName: \"kubernetes.io/projected/d5db404c-c3e3-432f-8df7-ea1936db6d10-kube-api-access-25gt8\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.440367 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d5db404c-c3e3-432f-8df7-ea1936db6d10" (UID: "d5db404c-c3e3-432f-8df7-ea1936db6d10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.480615 4788 generic.go:334] "Generic (PLEG): container finished" podID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerID="fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e" exitCode=0 Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.480689 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-shvzz" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.480710 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerDied","Data":"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e"} Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.481500 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shvzz" event={"ID":"d5db404c-c3e3-432f-8df7-ea1936db6d10","Type":"ContainerDied","Data":"f98d593641e7498c531ae1f1f4a3a93ea6a822e83ab5b9b6d72db6d98044aa03"} Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.481596 4788 scope.go:117] "RemoveContainer" containerID="fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.508043 4788 scope.go:117] "RemoveContainer" containerID="808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.524834 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5db404c-c3e3-432f-8df7-ea1936db6d10-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.532279 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.541968 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-shvzz"] Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.544978 4788 scope.go:117] "RemoveContainer" containerID="c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.599049 4788 scope.go:117] "RemoveContainer" containerID="fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e" Dec 11 10:18:34 crc kubenswrapper[4788]: E1211 10:18:34.599756 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e\": container with ID starting with fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e not found: ID does not exist" containerID="fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.599818 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e"} err="failed to get container status \"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e\": rpc error: code = NotFound desc = could not find container \"fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e\": container with ID starting with fdc4723bca67de375329f4e7c4171c9ea0e3cbefd9f1455e4e866cd0c6b6616e not found: ID does not exist" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.599859 4788 scope.go:117] "RemoveContainer" containerID="808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106" Dec 11 10:18:34 crc kubenswrapper[4788]: E1211 10:18:34.600210 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106\": container with ID 
starting with 808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106 not found: ID does not exist" containerID="808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.600274 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106"} err="failed to get container status \"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106\": rpc error: code = NotFound desc = could not find container \"808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106\": container with ID starting with 808de4d185fd8ffc74c6b7df11d63d9e1bcd3cc2e853a25fa75459168f408106 not found: ID does not exist" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.600301 4788 scope.go:117] "RemoveContainer" containerID="c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a" Dec 11 10:18:34 crc kubenswrapper[4788]: E1211 10:18:34.600651 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a\": container with ID starting with c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a not found: ID does not exist" containerID="c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a" Dec 11 10:18:34 crc kubenswrapper[4788]: I1211 10:18:34.600702 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a"} err="failed to get container status \"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a\": rpc error: code = NotFound desc = could not find container \"c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a\": container with ID starting with c01afc1fadefd19dd2f0ef1c49b08b8536e5608e60c742bc645f50cde0f23a7a not found: ID does not exist" Dec 11 10:18:36 crc kubenswrapper[4788]: I1211 10:18:36.510312 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" path="/var/lib/kubelet/pods/d5db404c-c3e3-432f-8df7-ea1936db6d10/volumes" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.701423 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:18:49 crc kubenswrapper[4788]: E1211 10:18:49.703800 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="extract-content" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.703855 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="extract-content" Dec 11 10:18:49 crc kubenswrapper[4788]: E1211 10:18:49.703940 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="registry-server" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.703949 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="registry-server" Dec 11 10:18:49 crc kubenswrapper[4788]: E1211 10:18:49.703974 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="extract-utilities" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.703982 4788 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="extract-utilities" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.719611 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5db404c-c3e3-432f-8df7-ea1936db6d10" containerName="registry-server" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.723539 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.741463 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.905041 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.905354 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:49 crc kubenswrapper[4788]: I1211 10:18:49.905473 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqwnp\" (UniqueName: \"kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.007681 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.007761 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqwnp\" (UniqueName: \"kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.007932 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.008559 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.008613 4788 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.033587 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqwnp\" (UniqueName: \"kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp\") pod \"redhat-marketplace-n2nrg\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.073798 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:18:50 crc kubenswrapper[4788]: I1211 10:18:50.654695 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:18:51 crc kubenswrapper[4788]: I1211 10:18:51.369154 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:18:51 crc kubenswrapper[4788]: I1211 10:18:51.369537 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:18:51 crc kubenswrapper[4788]: I1211 10:18:51.681912 4788 generic.go:334] "Generic (PLEG): container finished" podID="a869b596-97ec-475c-be51-d13b8eed7e82" containerID="07077942a728d9f735773c3bcf28cdd86126864dfadcb59af0ed7e24162cce20" exitCode=0 Dec 11 10:18:51 crc kubenswrapper[4788]: I1211 10:18:51.682072 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerDied","Data":"07077942a728d9f735773c3bcf28cdd86126864dfadcb59af0ed7e24162cce20"} Dec 11 10:18:51 crc kubenswrapper[4788]: I1211 10:18:51.682948 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerStarted","Data":"b62d0f8e5e9cc7255982a77656536013552b1a957159f7a9549f23fe6301b548"} Dec 11 10:18:53 crc kubenswrapper[4788]: I1211 10:18:53.706413 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerStarted","Data":"58cfa91636796742db27b6f44bf4a8eda99f1c71d89ca4229f5c8f5dca15ffbb"} Dec 11 10:18:54 crc kubenswrapper[4788]: I1211 10:18:54.718888 4788 generic.go:334] "Generic (PLEG): container finished" podID="a869b596-97ec-475c-be51-d13b8eed7e82" containerID="58cfa91636796742db27b6f44bf4a8eda99f1c71d89ca4229f5c8f5dca15ffbb" exitCode=0 Dec 11 10:18:54 crc kubenswrapper[4788]: I1211 10:18:54.719005 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" 
event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerDied","Data":"58cfa91636796742db27b6f44bf4a8eda99f1c71d89ca4229f5c8f5dca15ffbb"} Dec 11 10:18:57 crc kubenswrapper[4788]: I1211 10:18:57.763940 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerStarted","Data":"f04d27ddc95581ed3f834a46cf2d92ea2a26f639261c8f24da6965a89cbc3284"} Dec 11 10:19:00 crc kubenswrapper[4788]: I1211 10:19:00.825382 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n2nrg" podStartSLOduration=6.797923129 podStartE2EDuration="11.825357032s" podCreationTimestamp="2025-12-11 10:18:49 +0000 UTC" firstStartedPulling="2025-12-11 10:18:51.684260455 +0000 UTC m=+3461.755040041" lastFinishedPulling="2025-12-11 10:18:56.711694358 +0000 UTC m=+3466.782473944" observedRunningTime="2025-12-11 10:19:00.820075489 +0000 UTC m=+3470.890855095" watchObservedRunningTime="2025-12-11 10:19:00.825357032 +0000 UTC m=+3470.896136618" Dec 11 10:19:10 crc kubenswrapper[4788]: I1211 10:19:10.074379 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:10 crc kubenswrapper[4788]: I1211 10:19:10.075076 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:10 crc kubenswrapper[4788]: I1211 10:19:10.142619 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:10 crc kubenswrapper[4788]: I1211 10:19:10.960323 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:11 crc kubenswrapper[4788]: I1211 10:19:11.022628 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:19:12 crc kubenswrapper[4788]: I1211 10:19:12.921749 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n2nrg" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="registry-server" containerID="cri-o://f04d27ddc95581ed3f834a46cf2d92ea2a26f639261c8f24da6965a89cbc3284" gracePeriod=2 Dec 11 10:19:13 crc kubenswrapper[4788]: I1211 10:19:13.954007 4788 generic.go:334] "Generic (PLEG): container finished" podID="a869b596-97ec-475c-be51-d13b8eed7e82" containerID="f04d27ddc95581ed3f834a46cf2d92ea2a26f639261c8f24da6965a89cbc3284" exitCode=0 Dec 11 10:19:13 crc kubenswrapper[4788]: I1211 10:19:13.954205 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerDied","Data":"f04d27ddc95581ed3f834a46cf2d92ea2a26f639261c8f24da6965a89cbc3284"} Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.272311 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.354987 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities\") pod \"a869b596-97ec-475c-be51-d13b8eed7e82\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.355508 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content\") pod \"a869b596-97ec-475c-be51-d13b8eed7e82\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.355574 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqwnp\" (UniqueName: \"kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp\") pod \"a869b596-97ec-475c-be51-d13b8eed7e82\" (UID: \"a869b596-97ec-475c-be51-d13b8eed7e82\") " Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.356576 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities" (OuterVolumeSpecName: "utilities") pod "a869b596-97ec-475c-be51-d13b8eed7e82" (UID: "a869b596-97ec-475c-be51-d13b8eed7e82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.363327 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp" (OuterVolumeSpecName: "kube-api-access-nqwnp") pod "a869b596-97ec-475c-be51-d13b8eed7e82" (UID: "a869b596-97ec-475c-be51-d13b8eed7e82"). InnerVolumeSpecName "kube-api-access-nqwnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.389112 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a869b596-97ec-475c-be51-d13b8eed7e82" (UID: "a869b596-97ec-475c-be51-d13b8eed7e82"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.458215 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.458286 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqwnp\" (UniqueName: \"kubernetes.io/projected/a869b596-97ec-475c-be51-d13b8eed7e82-kube-api-access-nqwnp\") on node \"crc\" DevicePath \"\"" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.458302 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a869b596-97ec-475c-be51-d13b8eed7e82-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.993066 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2nrg" event={"ID":"a869b596-97ec-475c-be51-d13b8eed7e82","Type":"ContainerDied","Data":"b62d0f8e5e9cc7255982a77656536013552b1a957159f7a9549f23fe6301b548"} Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.993892 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2nrg" Dec 11 10:19:14 crc kubenswrapper[4788]: I1211 10:19:14.997644 4788 scope.go:117] "RemoveContainer" containerID="f04d27ddc95581ed3f834a46cf2d92ea2a26f639261c8f24da6965a89cbc3284" Dec 11 10:19:15 crc kubenswrapper[4788]: I1211 10:19:15.044715 4788 scope.go:117] "RemoveContainer" containerID="58cfa91636796742db27b6f44bf4a8eda99f1c71d89ca4229f5c8f5dca15ffbb" Dec 11 10:19:15 crc kubenswrapper[4788]: I1211 10:19:15.048371 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:19:15 crc kubenswrapper[4788]: I1211 10:19:15.067884 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2nrg"] Dec 11 10:19:15 crc kubenswrapper[4788]: I1211 10:19:15.073329 4788 scope.go:117] "RemoveContainer" containerID="07077942a728d9f735773c3bcf28cdd86126864dfadcb59af0ed7e24162cce20" Dec 11 10:19:16 crc kubenswrapper[4788]: I1211 10:19:16.511273 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" path="/var/lib/kubelet/pods/a869b596-97ec-475c-be51-d13b8eed7e82/volumes" Dec 11 10:19:21 crc kubenswrapper[4788]: I1211 10:19:21.369491 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:19:21 crc kubenswrapper[4788]: I1211 10:19:21.370041 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:19:51 crc kubenswrapper[4788]: I1211 10:19:51.369117 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:19:51 crc kubenswrapper[4788]: I1211 10:19:51.369940 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:19:51 crc kubenswrapper[4788]: I1211 10:19:51.370012 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:19:51 crc kubenswrapper[4788]: I1211 10:19:51.371035 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:19:51 crc kubenswrapper[4788]: I1211 10:19:51.371095 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d" gracePeriod=600 Dec 11 10:19:52 crc kubenswrapper[4788]: I1211 10:19:52.373337 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d" exitCode=0 Dec 11 10:19:52 crc kubenswrapper[4788]: I1211 10:19:52.373452 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d"} Dec 11 10:19:52 crc kubenswrapper[4788]: I1211 10:19:52.374027 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660"} Dec 11 10:19:52 crc kubenswrapper[4788]: I1211 10:19:52.374061 4788 scope.go:117] "RemoveContainer" containerID="205f988ef0682cb7a3b470e0dbb1c615d468b9fec9ddeb99a809db2be66cda7d" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.633335 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:37 crc kubenswrapper[4788]: E1211 10:21:37.635182 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="registry-server" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.635206 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="registry-server" Dec 11 10:21:37 crc kubenswrapper[4788]: E1211 10:21:37.635221 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="extract-utilities" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.635252 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" 
containerName="extract-utilities" Dec 11 10:21:37 crc kubenswrapper[4788]: E1211 10:21:37.635278 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="extract-content" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.635286 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="extract-content" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.635531 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a869b596-97ec-475c-be51-d13b8eed7e82" containerName="registry-server" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.637201 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.656087 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.825361 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.825444 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.825488 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l9bv\" (UniqueName: \"kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.927959 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.928069 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.928120 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l9bv\" (UniqueName: \"kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.928692 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.929003 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.950334 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l9bv\" (UniqueName: \"kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv\") pod \"community-operators-g4thw\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:37 crc kubenswrapper[4788]: I1211 10:21:37.967261 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:38 crc kubenswrapper[4788]: I1211 10:21:38.538462 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:39 crc kubenswrapper[4788]: I1211 10:21:39.456402 4788 generic.go:334] "Generic (PLEG): container finished" podID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerID="ed3ca97acbd83a9bc0aeb9bc3f747a4e0d80902ed6087861daf6f1341a12cfff" exitCode=0 Dec 11 10:21:39 crc kubenswrapper[4788]: I1211 10:21:39.456473 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerDied","Data":"ed3ca97acbd83a9bc0aeb9bc3f747a4e0d80902ed6087861daf6f1341a12cfff"} Dec 11 10:21:39 crc kubenswrapper[4788]: I1211 10:21:39.457833 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerStarted","Data":"9c792a82419d64df0d8ff681d7ec14e181a086f1a8b9d27baff8fcc675f657bb"} Dec 11 10:21:41 crc kubenswrapper[4788]: I1211 10:21:41.479976 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerStarted","Data":"588c1e163abe5105247bd0ed7d51d07938d0d67eb43ae1fa5463f3564f0b106d"} Dec 11 10:21:42 crc kubenswrapper[4788]: I1211 10:21:42.492309 4788 generic.go:334] "Generic (PLEG): container finished" podID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerID="588c1e163abe5105247bd0ed7d51d07938d0d67eb43ae1fa5463f3564f0b106d" exitCode=0 Dec 11 10:21:42 crc kubenswrapper[4788]: I1211 10:21:42.492410 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerDied","Data":"588c1e163abe5105247bd0ed7d51d07938d0d67eb43ae1fa5463f3564f0b106d"} Dec 11 10:21:43 crc kubenswrapper[4788]: I1211 10:21:43.505117 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerStarted","Data":"257eaeb9eadd3e6d51399b9cd4aeeb65b86a3bb91732ec8211c09847a01c281b"} Dec 11 
10:21:43 crc kubenswrapper[4788]: I1211 10:21:43.531039 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g4thw" podStartSLOduration=3.021190501 podStartE2EDuration="6.531017105s" podCreationTimestamp="2025-12-11 10:21:37 +0000 UTC" firstStartedPulling="2025-12-11 10:21:39.45967443 +0000 UTC m=+3629.530454016" lastFinishedPulling="2025-12-11 10:21:42.969501034 +0000 UTC m=+3633.040280620" observedRunningTime="2025-12-11 10:21:43.526753237 +0000 UTC m=+3633.597532823" watchObservedRunningTime="2025-12-11 10:21:43.531017105 +0000 UTC m=+3633.601796691" Dec 11 10:21:47 crc kubenswrapper[4788]: I1211 10:21:47.968421 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:47 crc kubenswrapper[4788]: I1211 10:21:47.969029 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:48 crc kubenswrapper[4788]: I1211 10:21:48.018598 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:48 crc kubenswrapper[4788]: I1211 10:21:48.619499 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:48 crc kubenswrapper[4788]: I1211 10:21:48.683950 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:50 crc kubenswrapper[4788]: I1211 10:21:50.580561 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g4thw" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="registry-server" containerID="cri-o://257eaeb9eadd3e6d51399b9cd4aeeb65b86a3bb91732ec8211c09847a01c281b" gracePeriod=2 Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.369931 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.370534 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.595570 4788 generic.go:334] "Generic (PLEG): container finished" podID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerID="257eaeb9eadd3e6d51399b9cd4aeeb65b86a3bb91732ec8211c09847a01c281b" exitCode=0 Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.595630 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerDied","Data":"257eaeb9eadd3e6d51399b9cd4aeeb65b86a3bb91732ec8211c09847a01c281b"} Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.858542 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.928379 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content\") pod \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.928572 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9l9bv\" (UniqueName: \"kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv\") pod \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.928845 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities\") pod \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\" (UID: \"1e51fdb5-63d7-4414-bc76-6b8e0727ec36\") " Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.929732 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities" (OuterVolumeSpecName: "utilities") pod "1e51fdb5-63d7-4414-bc76-6b8e0727ec36" (UID: "1e51fdb5-63d7-4414-bc76-6b8e0727ec36"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.930353 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.937664 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv" (OuterVolumeSpecName: "kube-api-access-9l9bv") pod "1e51fdb5-63d7-4414-bc76-6b8e0727ec36" (UID: "1e51fdb5-63d7-4414-bc76-6b8e0727ec36"). InnerVolumeSpecName "kube-api-access-9l9bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:21:51 crc kubenswrapper[4788]: I1211 10:21:51.990023 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e51fdb5-63d7-4414-bc76-6b8e0727ec36" (UID: "1e51fdb5-63d7-4414-bc76-6b8e0727ec36"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.032667 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9l9bv\" (UniqueName: \"kubernetes.io/projected/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-kube-api-access-9l9bv\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.032710 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e51fdb5-63d7-4414-bc76-6b8e0727ec36-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.608744 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g4thw" event={"ID":"1e51fdb5-63d7-4414-bc76-6b8e0727ec36","Type":"ContainerDied","Data":"9c792a82419d64df0d8ff681d7ec14e181a086f1a8b9d27baff8fcc675f657bb"} Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.608816 4788 scope.go:117] "RemoveContainer" containerID="257eaeb9eadd3e6d51399b9cd4aeeb65b86a3bb91732ec8211c09847a01c281b" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.608865 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g4thw" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.637129 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.639021 4788 scope.go:117] "RemoveContainer" containerID="588c1e163abe5105247bd0ed7d51d07938d0d67eb43ae1fa5463f3564f0b106d" Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.649554 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g4thw"] Dec 11 10:21:52 crc kubenswrapper[4788]: I1211 10:21:52.685767 4788 scope.go:117] "RemoveContainer" containerID="ed3ca97acbd83a9bc0aeb9bc3f747a4e0d80902ed6087861daf6f1341a12cfff" Dec 11 10:21:54 crc kubenswrapper[4788]: I1211 10:21:54.507951 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" path="/var/lib/kubelet/pods/1e51fdb5-63d7-4414-bc76-6b8e0727ec36/volumes" Dec 11 10:22:21 crc kubenswrapper[4788]: I1211 10:22:21.369032 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:22:21 crc kubenswrapper[4788]: I1211 10:22:21.369703 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:22:51 crc kubenswrapper[4788]: I1211 10:22:51.369504 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:22:51 crc kubenswrapper[4788]: I1211 10:22:51.370159 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" 
podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:22:51 crc kubenswrapper[4788]: I1211 10:22:51.370219 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:22:51 crc kubenswrapper[4788]: I1211 10:22:51.371103 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:22:51 crc kubenswrapper[4788]: I1211 10:22:51.371217 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" gracePeriod=600 Dec 11 10:22:51 crc kubenswrapper[4788]: E1211 10:22:51.551770 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:22:52 crc kubenswrapper[4788]: I1211 10:22:52.231743 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" exitCode=0 Dec 11 10:22:52 crc kubenswrapper[4788]: I1211 10:22:52.231794 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660"} Dec 11 10:22:52 crc kubenswrapper[4788]: I1211 10:22:52.231878 4788 scope.go:117] "RemoveContainer" containerID="324fe31631691b87cae3df8fcfed8541fe8fc5e8b88ea7b7fd9527dca5f93f1d" Dec 11 10:22:52 crc kubenswrapper[4788]: I1211 10:22:52.232722 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:22:52 crc kubenswrapper[4788]: E1211 10:22:52.233082 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:23:03 crc kubenswrapper[4788]: I1211 10:23:03.496378 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:23:03 crc kubenswrapper[4788]: E1211 10:23:03.497442 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:23:15 crc kubenswrapper[4788]: I1211 10:23:15.496562 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:23:15 crc kubenswrapper[4788]: E1211 10:23:15.497357 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:23:30 crc kubenswrapper[4788]: I1211 10:23:30.512596 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:23:30 crc kubenswrapper[4788]: E1211 10:23:30.513467 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:23:44 crc kubenswrapper[4788]: I1211 10:23:44.496542 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:23:44 crc kubenswrapper[4788]: E1211 10:23:44.498759 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:23:59 crc kubenswrapper[4788]: I1211 10:23:59.496246 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:23:59 crc kubenswrapper[4788]: E1211 10:23:59.497349 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:24:13 crc kubenswrapper[4788]: I1211 10:24:13.496334 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:24:13 crc kubenswrapper[4788]: E1211 10:24:13.497190 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:24:24 crc kubenswrapper[4788]: I1211 10:24:24.496691 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:24:24 crc kubenswrapper[4788]: E1211 10:24:24.497728 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:24:36 crc kubenswrapper[4788]: I1211 10:24:36.496687 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:24:36 crc kubenswrapper[4788]: E1211 10:24:36.497674 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:24:47 crc kubenswrapper[4788]: I1211 10:24:47.495870 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:24:47 crc kubenswrapper[4788]: E1211 10:24:47.496881 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:25:02 crc kubenswrapper[4788]: I1211 10:25:02.496253 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:25:02 crc kubenswrapper[4788]: E1211 10:25:02.497071 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:25:15 crc kubenswrapper[4788]: I1211 10:25:15.496529 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:25:15 crc kubenswrapper[4788]: E1211 10:25:15.497256 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:25:30 crc kubenswrapper[4788]: I1211 10:25:30.503670 4788 
scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:25:30 crc kubenswrapper[4788]: E1211 10:25:30.504645 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:25:42 crc kubenswrapper[4788]: I1211 10:25:42.496442 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:25:42 crc kubenswrapper[4788]: E1211 10:25:42.510806 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:25:57 crc kubenswrapper[4788]: I1211 10:25:57.495494 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:25:57 crc kubenswrapper[4788]: E1211 10:25:57.496379 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.612064 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4shjb"] Dec 11 10:26:10 crc kubenswrapper[4788]: E1211 10:26:10.613283 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="registry-server" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.613304 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="registry-server" Dec 11 10:26:10 crc kubenswrapper[4788]: E1211 10:26:10.613349 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="extract-content" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.613358 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="extract-content" Dec 11 10:26:10 crc kubenswrapper[4788]: E1211 10:26:10.613378 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="extract-utilities" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.613390 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="extract-utilities" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.613649 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e51fdb5-63d7-4414-bc76-6b8e0727ec36" containerName="registry-server" Dec 11 10:26:10 crc 
kubenswrapper[4788]: I1211 10:26:10.618116 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.626455 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4shjb"] Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.689040 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-catalog-content\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.689702 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-utilities\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.689975 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktj2c\" (UniqueName: \"kubernetes.io/projected/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-kube-api-access-ktj2c\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.793211 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktj2c\" (UniqueName: \"kubernetes.io/projected/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-kube-api-access-ktj2c\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.793515 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-catalog-content\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.793598 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-utilities\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.794077 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-catalog-content\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.794097 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-utilities\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 
11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.817317 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktj2c\" (UniqueName: \"kubernetes.io/projected/70fa93f1-ffeb-40d1-9df3-284d4ec505fa-kube-api-access-ktj2c\") pod \"certified-operators-4shjb\" (UID: \"70fa93f1-ffeb-40d1-9df3-284d4ec505fa\") " pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:10 crc kubenswrapper[4788]: I1211 10:26:10.992792 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:11 crc kubenswrapper[4788]: I1211 10:26:11.496590 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:26:11 crc kubenswrapper[4788]: E1211 10:26:11.497194 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:26:11 crc kubenswrapper[4788]: I1211 10:26:11.535518 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4shjb"] Dec 11 10:26:12 crc kubenswrapper[4788]: I1211 10:26:12.485382 4788 generic.go:334] "Generic (PLEG): container finished" podID="70fa93f1-ffeb-40d1-9df3-284d4ec505fa" containerID="d73ab391bf44f220fb2738762390cd23022ad84176bcb26bc46d4bb77935a37f" exitCode=0 Dec 11 10:26:12 crc kubenswrapper[4788]: I1211 10:26:12.485441 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4shjb" event={"ID":"70fa93f1-ffeb-40d1-9df3-284d4ec505fa","Type":"ContainerDied","Data":"d73ab391bf44f220fb2738762390cd23022ad84176bcb26bc46d4bb77935a37f"} Dec 11 10:26:12 crc kubenswrapper[4788]: I1211 10:26:12.485716 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4shjb" event={"ID":"70fa93f1-ffeb-40d1-9df3-284d4ec505fa","Type":"ContainerStarted","Data":"810f89335217e118ed476ade812a269ebaf8fa656069b0a9650aa9c097a3780e"} Dec 11 10:26:12 crc kubenswrapper[4788]: I1211 10:26:12.487810 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:26:18 crc kubenswrapper[4788]: I1211 10:26:18.546653 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4shjb" event={"ID":"70fa93f1-ffeb-40d1-9df3-284d4ec505fa","Type":"ContainerStarted","Data":"8729b0730d8324f565ef1776e3c2d1f01e9f8369b8b67b5acd1b20a594e44e19"} Dec 11 10:26:19 crc kubenswrapper[4788]: I1211 10:26:19.557936 4788 generic.go:334] "Generic (PLEG): container finished" podID="70fa93f1-ffeb-40d1-9df3-284d4ec505fa" containerID="8729b0730d8324f565ef1776e3c2d1f01e9f8369b8b67b5acd1b20a594e44e19" exitCode=0 Dec 11 10:26:19 crc kubenswrapper[4788]: I1211 10:26:19.558110 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4shjb" event={"ID":"70fa93f1-ffeb-40d1-9df3-284d4ec505fa","Type":"ContainerDied","Data":"8729b0730d8324f565ef1776e3c2d1f01e9f8369b8b67b5acd1b20a594e44e19"} Dec 11 10:26:21 crc kubenswrapper[4788]: I1211 10:26:21.578627 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-4shjb" event={"ID":"70fa93f1-ffeb-40d1-9df3-284d4ec505fa","Type":"ContainerStarted","Data":"3e70487df10f16bea9a325ef28978990b1c296501adcd631f551f9c09985d0c5"} Dec 11 10:26:21 crc kubenswrapper[4788]: I1211 10:26:21.603825 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4shjb" podStartSLOduration=3.489225944 podStartE2EDuration="11.603802079s" podCreationTimestamp="2025-12-11 10:26:10 +0000 UTC" firstStartedPulling="2025-12-11 10:26:12.48752527 +0000 UTC m=+3902.558304856" lastFinishedPulling="2025-12-11 10:26:20.602101405 +0000 UTC m=+3910.672880991" observedRunningTime="2025-12-11 10:26:21.597577601 +0000 UTC m=+3911.668357187" watchObservedRunningTime="2025-12-11 10:26:21.603802079 +0000 UTC m=+3911.674581665" Dec 11 10:26:25 crc kubenswrapper[4788]: I1211 10:26:25.496462 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:26:25 crc kubenswrapper[4788]: E1211 10:26:25.497256 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:26:30 crc kubenswrapper[4788]: I1211 10:26:30.993451 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:30 crc kubenswrapper[4788]: I1211 10:26:30.994180 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:31 crc kubenswrapper[4788]: I1211 10:26:31.043337 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:31 crc kubenswrapper[4788]: I1211 10:26:31.739711 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4shjb" Dec 11 10:26:31 crc kubenswrapper[4788]: I1211 10:26:31.812097 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4shjb"] Dec 11 10:26:31 crc kubenswrapper[4788]: I1211 10:26:31.885951 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:26:31 crc kubenswrapper[4788]: I1211 10:26:31.886250 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tmjrw" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="registry-server" containerID="cri-o://7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f" gracePeriod=2 Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.513554 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.578933 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content\") pod \"59256e55-52ad-435a-8996-30d1ec1a0e18\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.579199 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fmkf\" (UniqueName: \"kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf\") pod \"59256e55-52ad-435a-8996-30d1ec1a0e18\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.579298 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities\") pod \"59256e55-52ad-435a-8996-30d1ec1a0e18\" (UID: \"59256e55-52ad-435a-8996-30d1ec1a0e18\") " Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.580538 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities" (OuterVolumeSpecName: "utilities") pod "59256e55-52ad-435a-8996-30d1ec1a0e18" (UID: "59256e55-52ad-435a-8996-30d1ec1a0e18"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.587053 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf" (OuterVolumeSpecName: "kube-api-access-2fmkf") pod "59256e55-52ad-435a-8996-30d1ec1a0e18" (UID: "59256e55-52ad-435a-8996-30d1ec1a0e18"). InnerVolumeSpecName "kube-api-access-2fmkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.631874 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "59256e55-52ad-435a-8996-30d1ec1a0e18" (UID: "59256e55-52ad-435a-8996-30d1ec1a0e18"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.681502 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.681744 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fmkf\" (UniqueName: \"kubernetes.io/projected/59256e55-52ad-435a-8996-30d1ec1a0e18-kube-api-access-2fmkf\") on node \"crc\" DevicePath \"\"" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.681803 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/59256e55-52ad-435a-8996-30d1ec1a0e18-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.697882 4788 generic.go:334] "Generic (PLEG): container finished" podID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerID="7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f" exitCode=0 Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.697986 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerDied","Data":"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f"} Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.698038 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmjrw" event={"ID":"59256e55-52ad-435a-8996-30d1ec1a0e18","Type":"ContainerDied","Data":"6756e0ca846831ee4d96dc64b4ddaf0a23c1c82c3267dc2740b0e025f0e1ef0e"} Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.698067 4788 scope.go:117] "RemoveContainer" containerID="7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.698059 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tmjrw" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.742988 4788 scope.go:117] "RemoveContainer" containerID="54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.794367 4788 scope.go:117] "RemoveContainer" containerID="3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.800554 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.838947 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tmjrw"] Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.860294 4788 scope.go:117] "RemoveContainer" containerID="7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f" Dec 11 10:26:32 crc kubenswrapper[4788]: E1211 10:26:32.860735 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f\": container with ID starting with 7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f not found: ID does not exist" containerID="7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.860769 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f"} err="failed to get container status \"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f\": rpc error: code = NotFound desc = could not find container \"7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f\": container with ID starting with 7adb69716cd2b1d3887615e56c8b672488cfd4080640075ae853a525ce47223f not found: ID does not exist" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.860795 4788 scope.go:117] "RemoveContainer" containerID="54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c" Dec 11 10:26:32 crc kubenswrapper[4788]: E1211 10:26:32.860975 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c\": container with ID starting with 54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c not found: ID does not exist" containerID="54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.860998 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c"} err="failed to get container status \"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c\": rpc error: code = NotFound desc = could not find container \"54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c\": container with ID starting with 54856c8e99827841b51ee100d93b3da9079520900c4f24aa9c9f1f96e5757b9c not found: ID does not exist" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.861012 4788 scope.go:117] "RemoveContainer" containerID="3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06" Dec 11 10:26:32 crc kubenswrapper[4788]: E1211 10:26:32.861210 4788 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06\": container with ID starting with 3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06 not found: ID does not exist" containerID="3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06" Dec 11 10:26:32 crc kubenswrapper[4788]: I1211 10:26:32.861247 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06"} err="failed to get container status \"3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06\": rpc error: code = NotFound desc = could not find container \"3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06\": container with ID starting with 3d9c73f1bd839896066dc0b8b79fd7fe2e671a792203db123e10dfea8ea0ef06 not found: ID does not exist" Dec 11 10:26:34 crc kubenswrapper[4788]: I1211 10:26:34.511642 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" path="/var/lib/kubelet/pods/59256e55-52ad-435a-8996-30d1ec1a0e18/volumes" Dec 11 10:26:37 crc kubenswrapper[4788]: I1211 10:26:37.496213 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:26:37 crc kubenswrapper[4788]: E1211 10:26:37.496856 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:26:48 crc kubenswrapper[4788]: I1211 10:26:48.497325 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:26:48 crc kubenswrapper[4788]: E1211 10:26:48.498369 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:27:03 crc kubenswrapper[4788]: I1211 10:27:03.496965 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:27:03 crc kubenswrapper[4788]: E1211 10:27:03.497968 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:27:18 crc kubenswrapper[4788]: I1211 10:27:18.496460 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:27:18 crc kubenswrapper[4788]: E1211 10:27:18.497497 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:27:29 crc kubenswrapper[4788]: I1211 10:27:29.496060 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:27:29 crc kubenswrapper[4788]: E1211 10:27:29.497175 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:27:40 crc kubenswrapper[4788]: I1211 10:27:40.505949 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:27:40 crc kubenswrapper[4788]: E1211 10:27:40.507259 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:27:51 crc kubenswrapper[4788]: I1211 10:27:51.494592 4788 generic.go:334] "Generic (PLEG): container finished" podID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" containerID="30cda3f5b59442cd24f40f7c7381efe9cc2c6403f09a080726bcd1073c9a4071" exitCode=0 Dec 11 10:27:51 crc kubenswrapper[4788]: I1211 10:27:51.495314 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cf588e9-b8ff-4862-9a71-8e6805d89b2a","Type":"ContainerDied","Data":"30cda3f5b59442cd24f40f7c7381efe9cc2c6403f09a080726bcd1073c9a4071"} Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.857494 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915531 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pft9\" (UniqueName: \"kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915683 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915711 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915758 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915801 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915892 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.915960 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.916075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.916620 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary\") pod \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\" (UID: \"5cf588e9-b8ff-4862-9a71-8e6805d89b2a\") " Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.916949 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data" (OuterVolumeSpecName: "config-data") pod 
"5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.917064 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.917388 4788 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.917413 4788 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.921088 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.922012 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9" (OuterVolumeSpecName: "kube-api-access-9pft9") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "kube-api-access-9pft9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.924659 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.945333 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.955503 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.958660 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:27:52 crc kubenswrapper[4788]: I1211 10:27:52.983135 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5cf588e9-b8ff-4862-9a71-8e6805d89b2a" (UID: "5cf588e9-b8ff-4862-9a71-8e6805d89b2a"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019388 4788 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019420 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pft9\" (UniqueName: \"kubernetes.io/projected/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-kube-api-access-9pft9\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019429 4788 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019440 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019448 4788 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019480 4788 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.019490 4788 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5cf588e9-b8ff-4862-9a71-8e6805d89b2a-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.041009 4788 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.121778 4788 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.495459 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:27:53 crc kubenswrapper[4788]: 
I1211 10:27:53.514997 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5cf588e9-b8ff-4862-9a71-8e6805d89b2a","Type":"ContainerDied","Data":"33d5f2dc4a61cdde8022fd20dcbeac0d56fea932bcdb38cd0d20b1d78befeccc"} Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.515057 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33d5f2dc4a61cdde8022fd20dcbeac0d56fea932bcdb38cd0d20b1d78befeccc" Dec 11 10:27:53 crc kubenswrapper[4788]: I1211 10:27:53.515054 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 11 10:27:53 crc kubenswrapper[4788]: E1211 10:27:53.565597 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cf588e9_b8ff_4862_9a71_8e6805d89b2a.slice/crio-33d5f2dc4a61cdde8022fd20dcbeac0d56fea932bcdb38cd0d20b1d78befeccc\": RecentStats: unable to find data in memory cache]" Dec 11 10:27:54 crc kubenswrapper[4788]: I1211 10:27:54.527837 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38"} Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.481301 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 11 10:28:06 crc kubenswrapper[4788]: E1211 10:28:06.483755 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="extract-content" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.483893 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="extract-content" Dec 11 10:28:06 crc kubenswrapper[4788]: E1211 10:28:06.483992 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" containerName="tempest-tests-tempest-tests-runner" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.484070 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" containerName="tempest-tests-tempest-tests-runner" Dec 11 10:28:06 crc kubenswrapper[4788]: E1211 10:28:06.484172 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="extract-utilities" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.484271 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="extract-utilities" Dec 11 10:28:06 crc kubenswrapper[4788]: E1211 10:28:06.484354 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="registry-server" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.484435 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="registry-server" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.484729 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cf588e9-b8ff-4862-9a71-8e6805d89b2a" containerName="tempest-tests-tempest-tests-runner" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.484810 4788 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="59256e55-52ad-435a-8996-30d1ec1a0e18" containerName="registry-server" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.485869 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.490870 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-c9zcn" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.495245 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.686775 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4prq\" (UniqueName: \"kubernetes.io/projected/c7921196-c38d-467e-b733-20416c265305-kube-api-access-q4prq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.686873 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.789725 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4prq\" (UniqueName: \"kubernetes.io/projected/c7921196-c38d-467e-b733-20416c265305-kube-api-access-q4prq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.789853 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.790570 4788 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.810684 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4prq\" (UniqueName: \"kubernetes.io/projected/c7921196-c38d-467e-b733-20416c265305-kube-api-access-q4prq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:06 crc kubenswrapper[4788]: I1211 10:28:06.823620 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c7921196-c38d-467e-b733-20416c265305\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:07 crc kubenswrapper[4788]: I1211 10:28:07.116528 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 11 10:28:07 crc kubenswrapper[4788]: W1211 10:28:07.577131 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7921196_c38d_467e_b733_20416c265305.slice/crio-e3bdb7b4a53f2a8692097a5e308e3db7caf0052d4d54e3baa6c6a03e7c7b0e74 WatchSource:0}: Error finding container e3bdb7b4a53f2a8692097a5e308e3db7caf0052d4d54e3baa6c6a03e7c7b0e74: Status 404 returned error can't find the container with id e3bdb7b4a53f2a8692097a5e308e3db7caf0052d4d54e3baa6c6a03e7c7b0e74 Dec 11 10:28:07 crc kubenswrapper[4788]: I1211 10:28:07.578085 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 11 10:28:07 crc kubenswrapper[4788]: I1211 10:28:07.660747 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"c7921196-c38d-467e-b733-20416c265305","Type":"ContainerStarted","Data":"e3bdb7b4a53f2a8692097a5e308e3db7caf0052d4d54e3baa6c6a03e7c7b0e74"} Dec 11 10:28:10 crc kubenswrapper[4788]: I1211 10:28:10.688001 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"c7921196-c38d-467e-b733-20416c265305","Type":"ContainerStarted","Data":"da01e3b692f91ed24f8c1ddb2eb8dc267d30046f155f498cbbc221beb97a2233"} Dec 11 10:28:10 crc kubenswrapper[4788]: I1211 10:28:10.711167 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.8031238739999997 podStartE2EDuration="4.711146953s" podCreationTimestamp="2025-12-11 10:28:06 +0000 UTC" firstStartedPulling="2025-12-11 10:28:07.57945898 +0000 UTC m=+4017.650238566" lastFinishedPulling="2025-12-11 10:28:09.487482049 +0000 UTC m=+4019.558261645" observedRunningTime="2025-12-11 10:28:10.700574475 +0000 UTC m=+4020.771354061" watchObservedRunningTime="2025-12-11 10:28:10.711146953 +0000 UTC m=+4020.781926539" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.470540 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlbjk/must-gather-jrgr8"] Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.473319 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.476853 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nlbjk"/"openshift-service-ca.crt" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.476862 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nlbjk"/"default-dockercfg-bznd7" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.486147 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nlbjk/must-gather-jrgr8"] Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.523656 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nlbjk"/"kube-root-ca.crt" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.531346 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.531395 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zcgx\" (UniqueName: \"kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.633487 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.633876 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zcgx\" (UniqueName: \"kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.634118 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:32 crc kubenswrapper[4788]: I1211 10:28:32.958343 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zcgx\" (UniqueName: \"kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx\") pod \"must-gather-jrgr8\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:33 crc kubenswrapper[4788]: I1211 10:28:33.135771 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:28:33 crc kubenswrapper[4788]: I1211 10:28:33.636690 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nlbjk/must-gather-jrgr8"] Dec 11 10:28:33 crc kubenswrapper[4788]: I1211 10:28:33.911415 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" event={"ID":"45555a47-8e19-4bcd-8ec0-44e682b2d81c","Type":"ContainerStarted","Data":"7c1a594898b115514b1639695a030112d08bf0eafde8877eee0bca38fb2b860c"} Dec 11 10:28:41 crc kubenswrapper[4788]: I1211 10:28:41.016125 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" event={"ID":"45555a47-8e19-4bcd-8ec0-44e682b2d81c","Type":"ContainerStarted","Data":"72719bd7cc910eb6548c34d63a30b0c10c8731a159265ce187e49d52b903ea3a"} Dec 11 10:28:41 crc kubenswrapper[4788]: I1211 10:28:41.016663 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" event={"ID":"45555a47-8e19-4bcd-8ec0-44e682b2d81c","Type":"ContainerStarted","Data":"2f2477e7db9e9747925d69488506f9784eea0c158d33b6cf791a419a57d4b68e"} Dec 11 10:28:41 crc kubenswrapper[4788]: I1211 10:28:41.038985 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" podStartSLOduration=2.807772947 podStartE2EDuration="9.038960547s" podCreationTimestamp="2025-12-11 10:28:32 +0000 UTC" firstStartedPulling="2025-12-11 10:28:33.643661034 +0000 UTC m=+4043.714440620" lastFinishedPulling="2025-12-11 10:28:39.874848634 +0000 UTC m=+4049.945628220" observedRunningTime="2025-12-11 10:28:41.032249227 +0000 UTC m=+4051.103028813" watchObservedRunningTime="2025-12-11 10:28:41.038960547 +0000 UTC m=+4051.109740133" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.536584 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-tclqh"] Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.538855 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.688685 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.688750 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch9fp\" (UniqueName: \"kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.790313 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.790371 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch9fp\" (UniqueName: \"kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.790439 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.834549 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch9fp\" (UniqueName: \"kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp\") pod \"crc-debug-tclqh\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:44 crc kubenswrapper[4788]: I1211 10:28:44.868252 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:28:45 crc kubenswrapper[4788]: I1211 10:28:45.056463 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" event={"ID":"54c75587-f386-4d36-94ee-18c2b213d444","Type":"ContainerStarted","Data":"c17638b7f7f4ad5fa48b3406092e08fbf070b2d48559b9ea63ffd3b24a8f3cb0"} Dec 11 10:28:59 crc kubenswrapper[4788]: I1211 10:28:59.247293 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" event={"ID":"54c75587-f386-4d36-94ee-18c2b213d444","Type":"ContainerStarted","Data":"40d635e36505a9281f136e8a51c3854e90b968a4f76a1b576a2320449e31f783"} Dec 11 10:28:59 crc kubenswrapper[4788]: I1211 10:28:59.271117 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" podStartSLOduration=1.633025908 podStartE2EDuration="15.271098676s" podCreationTimestamp="2025-12-11 10:28:44 +0000 UTC" firstStartedPulling="2025-12-11 10:28:44.911842438 +0000 UTC m=+4054.982622024" lastFinishedPulling="2025-12-11 10:28:58.549915206 +0000 UTC m=+4068.620694792" observedRunningTime="2025-12-11 10:28:59.260385864 +0000 UTC m=+4069.331165450" watchObservedRunningTime="2025-12-11 10:28:59.271098676 +0000 UTC m=+4069.341878262" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.571351 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.574746 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.602194 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.640306 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.640366 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562xk\" (UniqueName: \"kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.640471 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.742450 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.742510 4788 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562xk\" (UniqueName: \"kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.742627 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.743251 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.743661 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.775390 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562xk\" (UniqueName: \"kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk\") pod \"redhat-operators-t97zk\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:13 crc kubenswrapper[4788]: I1211 10:29:13.903554 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:15 crc kubenswrapper[4788]: I1211 10:29:15.657530 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:16 crc kubenswrapper[4788]: I1211 10:29:16.469323 4788 generic.go:334] "Generic (PLEG): container finished" podID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerID="edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0" exitCode=0 Dec 11 10:29:16 crc kubenswrapper[4788]: I1211 10:29:16.469435 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerDied","Data":"edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0"} Dec 11 10:29:16 crc kubenswrapper[4788]: I1211 10:29:16.469744 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerStarted","Data":"73b01aa4cf450782b1b22cdc4d46c69757ae6806f7524742681429220fbcdb80"} Dec 11 10:29:18 crc kubenswrapper[4788]: I1211 10:29:18.489460 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerStarted","Data":"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69"} Dec 11 10:29:20 crc kubenswrapper[4788]: I1211 10:29:20.511267 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerDied","Data":"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69"} Dec 11 10:29:20 crc kubenswrapper[4788]: I1211 10:29:20.511295 4788 generic.go:334] "Generic (PLEG): container finished" podID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerID="dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69" exitCode=0 Dec 11 10:29:21 crc kubenswrapper[4788]: I1211 10:29:21.526301 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerStarted","Data":"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4"} Dec 11 10:29:21 crc kubenswrapper[4788]: I1211 10:29:21.551770 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t97zk" podStartSLOduration=3.851963654 podStartE2EDuration="8.551739531s" podCreationTimestamp="2025-12-11 10:29:13 +0000 UTC" firstStartedPulling="2025-12-11 10:29:16.470996047 +0000 UTC m=+4086.541775633" lastFinishedPulling="2025-12-11 10:29:21.170771924 +0000 UTC m=+4091.241551510" observedRunningTime="2025-12-11 10:29:21.547969505 +0000 UTC m=+4091.618749101" watchObservedRunningTime="2025-12-11 10:29:21.551739531 +0000 UTC m=+4091.622519117" Dec 11 10:29:23 crc kubenswrapper[4788]: I1211 10:29:23.904730 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:23 crc kubenswrapper[4788]: I1211 10:29:23.905286 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:24 crc kubenswrapper[4788]: I1211 10:29:24.971017 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t97zk" 
podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="registry-server" probeResult="failure" output=< Dec 11 10:29:24 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 10:29:24 crc kubenswrapper[4788]: > Dec 11 10:29:33 crc kubenswrapper[4788]: I1211 10:29:33.960142 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:34 crc kubenswrapper[4788]: I1211 10:29:34.024866 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:34 crc kubenswrapper[4788]: I1211 10:29:34.202620 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:35 crc kubenswrapper[4788]: I1211 10:29:35.673162 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t97zk" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="registry-server" containerID="cri-o://6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4" gracePeriod=2 Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.193607 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.310029 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-562xk\" (UniqueName: \"kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk\") pod \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.310092 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content\") pod \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.310223 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities\") pod \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\" (UID: \"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176\") " Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.311331 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities" (OuterVolumeSpecName: "utilities") pod "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" (UID: "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.329432 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk" (OuterVolumeSpecName: "kube-api-access-562xk") pod "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" (UID: "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176"). InnerVolumeSpecName "kube-api-access-562xk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.413180 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-562xk\" (UniqueName: \"kubernetes.io/projected/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-kube-api-access-562xk\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.413527 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.452958 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" (UID: "72fbd70a-3df3-4cd7-b02d-9eba5ff8e176"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.517581 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.683805 4788 generic.go:334] "Generic (PLEG): container finished" podID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerID="6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4" exitCode=0 Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.683881 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerDied","Data":"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4"} Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.684009 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t97zk" event={"ID":"72fbd70a-3df3-4cd7-b02d-9eba5ff8e176","Type":"ContainerDied","Data":"73b01aa4cf450782b1b22cdc4d46c69757ae6806f7524742681429220fbcdb80"} Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.684039 4788 scope.go:117] "RemoveContainer" containerID="6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.683932 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t97zk" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.710366 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.714446 4788 scope.go:117] "RemoveContainer" containerID="dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.719474 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t97zk"] Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.758739 4788 scope.go:117] "RemoveContainer" containerID="edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.784071 4788 scope.go:117] "RemoveContainer" containerID="6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4" Dec 11 10:29:36 crc kubenswrapper[4788]: E1211 10:29:36.784598 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4\": container with ID starting with 6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4 not found: ID does not exist" containerID="6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.784665 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4"} err="failed to get container status \"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4\": rpc error: code = NotFound desc = could not find container \"6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4\": container with ID starting with 6b0493e177900bd9ca85041f7accf8494c99003ea9fed4c4392e530a488599f4 not found: ID does not exist" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.784758 4788 scope.go:117] "RemoveContainer" containerID="dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69" Dec 11 10:29:36 crc kubenswrapper[4788]: E1211 10:29:36.785322 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69\": container with ID starting with dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69 not found: ID does not exist" containerID="dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.785345 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69"} err="failed to get container status \"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69\": rpc error: code = NotFound desc = could not find container \"dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69\": container with ID starting with dbec3ad1f0e8b64739465124caa0d5a4e2514ca10233b0c870c107fbf2e2ea69 not found: ID does not exist" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.785362 4788 scope.go:117] "RemoveContainer" containerID="edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0" Dec 11 10:29:36 crc kubenswrapper[4788]: E1211 10:29:36.786203 4788 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0\": container with ID starting with edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0 not found: ID does not exist" containerID="edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0" Dec 11 10:29:36 crc kubenswrapper[4788]: I1211 10:29:36.786262 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0"} err="failed to get container status \"edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0\": rpc error: code = NotFound desc = could not find container \"edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0\": container with ID starting with edb59224d27396f94102edf69baa032ffe34665de39159714c43ee1efba80fb0 not found: ID does not exist" Dec 11 10:29:38 crc kubenswrapper[4788]: I1211 10:29:38.507792 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" path="/var/lib/kubelet/pods/72fbd70a-3df3-4cd7-b02d-9eba5ff8e176/volumes" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.469831 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:29:49 crc kubenswrapper[4788]: E1211 10:29:49.471047 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="registry-server" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.471065 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="registry-server" Dec 11 10:29:49 crc kubenswrapper[4788]: E1211 10:29:49.471094 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="extract-content" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.471101 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="extract-content" Dec 11 10:29:49 crc kubenswrapper[4788]: E1211 10:29:49.471114 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="extract-utilities" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.471124 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="extract-utilities" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.471436 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="72fbd70a-3df3-4cd7-b02d-9eba5ff8e176" containerName="registry-server" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.473024 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.480232 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.525735 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggqtn\" (UniqueName: \"kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.525917 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.526297 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.628667 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.629325 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.628835 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggqtn\" (UniqueName: \"kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.629436 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.629888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.651815 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ggqtn\" (UniqueName: \"kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn\") pod \"redhat-marketplace-zn56d\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:49 crc kubenswrapper[4788]: I1211 10:29:49.811648 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:50 crc kubenswrapper[4788]: I1211 10:29:50.303645 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:29:50 crc kubenswrapper[4788]: I1211 10:29:50.818172 4788 generic.go:334] "Generic (PLEG): container finished" podID="58247a11-270b-494d-b7fb-4e5025060189" containerID="3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934" exitCode=0 Dec 11 10:29:50 crc kubenswrapper[4788]: I1211 10:29:50.818218 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerDied","Data":"3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934"} Dec 11 10:29:50 crc kubenswrapper[4788]: I1211 10:29:50.818262 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerStarted","Data":"3896498e53645428c65189ff0ba489c15cff75ed6011e87f44a0a981a1d34c50"} Dec 11 10:29:51 crc kubenswrapper[4788]: I1211 10:29:51.830511 4788 generic.go:334] "Generic (PLEG): container finished" podID="54c75587-f386-4d36-94ee-18c2b213d444" containerID="40d635e36505a9281f136e8a51c3854e90b968a4f76a1b576a2320449e31f783" exitCode=0 Dec 11 10:29:51 crc kubenswrapper[4788]: I1211 10:29:51.830601 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" event={"ID":"54c75587-f386-4d36-94ee-18c2b213d444","Type":"ContainerDied","Data":"40d635e36505a9281f136e8a51c3854e90b968a4f76a1b576a2320449e31f783"} Dec 11 10:29:52 crc kubenswrapper[4788]: I1211 10:29:52.841732 4788 generic.go:334] "Generic (PLEG): container finished" podID="58247a11-270b-494d-b7fb-4e5025060189" containerID="37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf" exitCode=0 Dec 11 10:29:52 crc kubenswrapper[4788]: I1211 10:29:52.841928 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerDied","Data":"37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf"} Dec 11 10:29:52 crc kubenswrapper[4788]: I1211 10:29:52.964795 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.004686 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch9fp\" (UniqueName: \"kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp\") pod \"54c75587-f386-4d36-94ee-18c2b213d444\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.004767 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host\") pod \"54c75587-f386-4d36-94ee-18c2b213d444\" (UID: \"54c75587-f386-4d36-94ee-18c2b213d444\") " Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.004944 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host" (OuterVolumeSpecName: "host") pod "54c75587-f386-4d36-94ee-18c2b213d444" (UID: "54c75587-f386-4d36-94ee-18c2b213d444"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.005487 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-tclqh"] Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.005720 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/54c75587-f386-4d36-94ee-18c2b213d444-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.010467 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp" (OuterVolumeSpecName: "kube-api-access-ch9fp") pod "54c75587-f386-4d36-94ee-18c2b213d444" (UID: "54c75587-f386-4d36-94ee-18c2b213d444"). InnerVolumeSpecName "kube-api-access-ch9fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.016442 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-tclqh"] Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.107689 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch9fp\" (UniqueName: \"kubernetes.io/projected/54c75587-f386-4d36-94ee-18c2b213d444-kube-api-access-ch9fp\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.855590 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerStarted","Data":"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873"} Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.858405 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c17638b7f7f4ad5fa48b3406092e08fbf070b2d48559b9ea63ffd3b24a8f3cb0" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.858473 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-tclqh" Dec 11 10:29:53 crc kubenswrapper[4788]: I1211 10:29:53.881696 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zn56d" podStartSLOduration=2.3499502310000002 podStartE2EDuration="4.881670925s" podCreationTimestamp="2025-12-11 10:29:49 +0000 UTC" firstStartedPulling="2025-12-11 10:29:50.822367605 +0000 UTC m=+4120.893147191" lastFinishedPulling="2025-12-11 10:29:53.354088299 +0000 UTC m=+4123.424867885" observedRunningTime="2025-12-11 10:29:53.875311744 +0000 UTC m=+4123.946091330" watchObservedRunningTime="2025-12-11 10:29:53.881670925 +0000 UTC m=+4123.952450511" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.217403 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-cxd56"] Dec 11 10:29:54 crc kubenswrapper[4788]: E1211 10:29:54.217970 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c75587-f386-4d36-94ee-18c2b213d444" containerName="container-00" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.217991 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c75587-f386-4d36-94ee-18c2b213d444" containerName="container-00" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.218266 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c75587-f386-4d36-94ee-18c2b213d444" containerName="container-00" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.219206 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.332436 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.333177 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzk9t\" (UniqueName: \"kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.435562 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzk9t\" (UniqueName: \"kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.435673 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.435902 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " 
pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.461749 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzk9t\" (UniqueName: \"kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t\") pod \"crc-debug-cxd56\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.512083 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c75587-f386-4d36-94ee-18c2b213d444" path="/var/lib/kubelet/pods/54c75587-f386-4d36-94ee-18c2b213d444/volumes" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.537983 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.874555 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" event={"ID":"0e046bd6-7873-4aec-bad0-6c3c80e92731","Type":"ContainerStarted","Data":"288426521a956b3167bca92806dad6d5f5ed6bbf81b1fdaecae995c035cfd9de"} Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.875282 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" event={"ID":"0e046bd6-7873-4aec-bad0-6c3c80e92731","Type":"ContainerStarted","Data":"a2473a8ff4d1f2672edeeb22bde2c771e6272245fc8ff8c76b371baafb2b8d90"} Dec 11 10:29:54 crc kubenswrapper[4788]: I1211 10:29:54.902093 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" podStartSLOduration=0.902061458 podStartE2EDuration="902.061458ms" podCreationTimestamp="2025-12-11 10:29:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:29:54.890067864 +0000 UTC m=+4124.960847450" watchObservedRunningTime="2025-12-11 10:29:54.902061458 +0000 UTC m=+4124.972841044" Dec 11 10:29:55 crc kubenswrapper[4788]: I1211 10:29:55.885547 4788 generic.go:334] "Generic (PLEG): container finished" podID="0e046bd6-7873-4aec-bad0-6c3c80e92731" containerID="288426521a956b3167bca92806dad6d5f5ed6bbf81b1fdaecae995c035cfd9de" exitCode=0 Dec 11 10:29:55 crc kubenswrapper[4788]: I1211 10:29:55.885854 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" event={"ID":"0e046bd6-7873-4aec-bad0-6c3c80e92731","Type":"ContainerDied","Data":"288426521a956b3167bca92806dad6d5f5ed6bbf81b1fdaecae995c035cfd9de"} Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.005355 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.045982 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-cxd56"] Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.055516 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-cxd56"] Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.093550 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzk9t\" (UniqueName: \"kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t\") pod \"0e046bd6-7873-4aec-bad0-6c3c80e92731\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.093655 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host\") pod \"0e046bd6-7873-4aec-bad0-6c3c80e92731\" (UID: \"0e046bd6-7873-4aec-bad0-6c3c80e92731\") " Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.093772 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host" (OuterVolumeSpecName: "host") pod "0e046bd6-7873-4aec-bad0-6c3c80e92731" (UID: "0e046bd6-7873-4aec-bad0-6c3c80e92731"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.094479 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0e046bd6-7873-4aec-bad0-6c3c80e92731-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.105347 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t" (OuterVolumeSpecName: "kube-api-access-jzk9t") pod "0e046bd6-7873-4aec-bad0-6c3c80e92731" (UID: "0e046bd6-7873-4aec-bad0-6c3c80e92731"). InnerVolumeSpecName "kube-api-access-jzk9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.195791 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzk9t\" (UniqueName: \"kubernetes.io/projected/0e046bd6-7873-4aec-bad0-6c3c80e92731-kube-api-access-jzk9t\") on node \"crc\" DevicePath \"\"" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.908267 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2473a8ff4d1f2672edeeb22bde2c771e6272245fc8ff8c76b371baafb2b8d90" Dec 11 10:29:57 crc kubenswrapper[4788]: I1211 10:29:57.908350 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-cxd56" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.193574 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-jtf8z"] Dec 11 10:29:58 crc kubenswrapper[4788]: E1211 10:29:58.194332 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e046bd6-7873-4aec-bad0-6c3c80e92731" containerName="container-00" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.194346 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e046bd6-7873-4aec-bad0-6c3c80e92731" containerName="container-00" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.194562 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e046bd6-7873-4aec-bad0-6c3c80e92731" containerName="container-00" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.195388 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.218114 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r2r6\" (UniqueName: \"kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.218214 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.320421 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r2r6\" (UniqueName: \"kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.320568 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.320806 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.341192 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r2r6\" (UniqueName: \"kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6\") pod \"crc-debug-jtf8z\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.511586 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e046bd6-7873-4aec-bad0-6c3c80e92731" 
path="/var/lib/kubelet/pods/0e046bd6-7873-4aec-bad0-6c3c80e92731/volumes" Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.517890 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:29:58 crc kubenswrapper[4788]: W1211 10:29:58.552023 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99d2cd6e_ce47_4549_8862_3e87529aee5f.slice/crio-12f63a06a54dcca7e32b0b182660c58fe446c8ffec5923607d44e23dbf33085c WatchSource:0}: Error finding container 12f63a06a54dcca7e32b0b182660c58fe446c8ffec5923607d44e23dbf33085c: Status 404 returned error can't find the container with id 12f63a06a54dcca7e32b0b182660c58fe446c8ffec5923607d44e23dbf33085c Dec 11 10:29:58 crc kubenswrapper[4788]: I1211 10:29:58.922457 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" event={"ID":"99d2cd6e-ce47-4549-8862-3e87529aee5f","Type":"ContainerStarted","Data":"12f63a06a54dcca7e32b0b182660c58fe446c8ffec5923607d44e23dbf33085c"} Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.812210 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.813434 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.866273 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.935572 4788 generic.go:334] "Generic (PLEG): container finished" podID="99d2cd6e-ce47-4549-8862-3e87529aee5f" containerID="eafb2525b345e6cba47a6ec0707d9593333efcbbcc98e35918cf9a691d22ea4e" exitCode=0 Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.936241 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" event={"ID":"99d2cd6e-ce47-4549-8862-3e87529aee5f","Type":"ContainerDied","Data":"eafb2525b345e6cba47a6ec0707d9593333efcbbcc98e35918cf9a691d22ea4e"} Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.987983 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-jtf8z"] Dec 11 10:29:59 crc kubenswrapper[4788]: I1211 10:29:59.996985 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlbjk/crc-debug-jtf8z"] Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.002114 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.105872 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.182971 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv"] Dec 11 10:30:00 crc kubenswrapper[4788]: E1211 10:30:00.183586 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d2cd6e-ce47-4549-8862-3e87529aee5f" containerName="container-00" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.183614 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d2cd6e-ce47-4549-8862-3e87529aee5f" containerName="container-00" Dec 11 
10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.183848 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d2cd6e-ce47-4549-8862-3e87529aee5f" containerName="container-00" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.185635 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.189842 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.190068 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.205941 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv"] Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.258297 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcxns\" (UniqueName: \"kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.258761 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.258867 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.360905 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.361101 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcxns\" (UniqueName: \"kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.361351 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.362959 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.378561 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.381329 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcxns\" (UniqueName: \"kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns\") pod \"collect-profiles-29424150-nm6bv\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.516749 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:00 crc kubenswrapper[4788]: W1211 10:30:00.986812 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85d6cf37_bf3a_4d4d_bc1c_20600947ab8d.slice/crio-7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee WatchSource:0}: Error finding container 7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee: Status 404 returned error can't find the container with id 7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee Dec 11 10:30:00 crc kubenswrapper[4788]: I1211 10:30:00.989624 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv"] Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.037498 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.074892 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host\") pod \"99d2cd6e-ce47-4549-8862-3e87529aee5f\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.075023 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host" (OuterVolumeSpecName: "host") pod "99d2cd6e-ce47-4549-8862-3e87529aee5f" (UID: "99d2cd6e-ce47-4549-8862-3e87529aee5f"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.075157 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r2r6\" (UniqueName: \"kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6\") pod \"99d2cd6e-ce47-4549-8862-3e87529aee5f\" (UID: \"99d2cd6e-ce47-4549-8862-3e87529aee5f\") " Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.075834 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/99d2cd6e-ce47-4549-8862-3e87529aee5f-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.085488 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6" (OuterVolumeSpecName: "kube-api-access-4r2r6") pod "99d2cd6e-ce47-4549-8862-3e87529aee5f" (UID: "99d2cd6e-ce47-4549-8862-3e87529aee5f"). InnerVolumeSpecName "kube-api-access-4r2r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.178152 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r2r6\" (UniqueName: \"kubernetes.io/projected/99d2cd6e-ce47-4549-8862-3e87529aee5f-kube-api-access-4r2r6\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.962991 4788 scope.go:117] "RemoveContainer" containerID="eafb2525b345e6cba47a6ec0707d9593333efcbbcc98e35918cf9a691d22ea4e" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.962997 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/crc-debug-jtf8z" Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.965991 4788 generic.go:334] "Generic (PLEG): container finished" podID="85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" containerID="1e0511615e526dc186b3bc2c8a12383e68f8679431544c42265219c540ef3f7d" exitCode=0 Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.966128 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" event={"ID":"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d","Type":"ContainerDied","Data":"1e0511615e526dc186b3bc2c8a12383e68f8679431544c42265219c540ef3f7d"} Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.966205 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" event={"ID":"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d","Type":"ContainerStarted","Data":"7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee"} Dec 11 10:30:01 crc kubenswrapper[4788]: I1211 10:30:01.966307 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zn56d" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="registry-server" containerID="cri-o://76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873" gracePeriod=2 Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.452800 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.506040 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggqtn\" (UniqueName: \"kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn\") pod \"58247a11-270b-494d-b7fb-4e5025060189\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.506193 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities\") pod \"58247a11-270b-494d-b7fb-4e5025060189\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.506331 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content\") pod \"58247a11-270b-494d-b7fb-4e5025060189\" (UID: \"58247a11-270b-494d-b7fb-4e5025060189\") " Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.508050 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities" (OuterVolumeSpecName: "utilities") pod "58247a11-270b-494d-b7fb-4e5025060189" (UID: "58247a11-270b-494d-b7fb-4e5025060189"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.509450 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99d2cd6e-ce47-4549-8862-3e87529aee5f" path="/var/lib/kubelet/pods/99d2cd6e-ce47-4549-8862-3e87529aee5f/volumes" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.521071 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn" (OuterVolumeSpecName: "kube-api-access-ggqtn") pod "58247a11-270b-494d-b7fb-4e5025060189" (UID: "58247a11-270b-494d-b7fb-4e5025060189"). InnerVolumeSpecName "kube-api-access-ggqtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.536776 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58247a11-270b-494d-b7fb-4e5025060189" (UID: "58247a11-270b-494d-b7fb-4e5025060189"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.609112 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggqtn\" (UniqueName: \"kubernetes.io/projected/58247a11-270b-494d-b7fb-4e5025060189-kube-api-access-ggqtn\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.609163 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.609172 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58247a11-270b-494d-b7fb-4e5025060189-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.977210 4788 generic.go:334] "Generic (PLEG): container finished" podID="58247a11-270b-494d-b7fb-4e5025060189" containerID="76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873" exitCode=0 Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.977332 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn56d" Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.977346 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerDied","Data":"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873"} Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.977836 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn56d" event={"ID":"58247a11-270b-494d-b7fb-4e5025060189","Type":"ContainerDied","Data":"3896498e53645428c65189ff0ba489c15cff75ed6011e87f44a0a981a1d34c50"} Dec 11 10:30:02 crc kubenswrapper[4788]: I1211 10:30:02.977883 4788 scope.go:117] "RemoveContainer" containerID="76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.002954 4788 scope.go:117] "RemoveContainer" containerID="37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.012853 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.024179 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn56d"] Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.057842 4788 scope.go:117] "RemoveContainer" containerID="3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.085935 4788 scope.go:117] "RemoveContainer" containerID="76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873" Dec 11 10:30:03 crc kubenswrapper[4788]: E1211 10:30:03.091838 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873\": container with ID starting with 76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873 not found: ID does not exist" containerID="76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.091896 4788 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873"} err="failed to get container status \"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873\": rpc error: code = NotFound desc = could not find container \"76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873\": container with ID starting with 76b7401172d4242663a18c7aa5cf01d9ef2f4809bed5b3ef786d0b10a6a25873 not found: ID does not exist" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.091934 4788 scope.go:117] "RemoveContainer" containerID="37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf" Dec 11 10:30:03 crc kubenswrapper[4788]: E1211 10:30:03.093128 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf\": container with ID starting with 37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf not found: ID does not exist" containerID="37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.093163 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf"} err="failed to get container status \"37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf\": rpc error: code = NotFound desc = could not find container \"37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf\": container with ID starting with 37a972b7c77d6f0324fb189fc0d15ad7d07a08de99cf8035768cf4b035ef1dbf not found: ID does not exist" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.093184 4788 scope.go:117] "RemoveContainer" containerID="3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934" Dec 11 10:30:03 crc kubenswrapper[4788]: E1211 10:30:03.093473 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934\": container with ID starting with 3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934 not found: ID does not exist" containerID="3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.093513 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934"} err="failed to get container status \"3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934\": rpc error: code = NotFound desc = could not find container \"3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934\": container with ID starting with 3ac1cb4c4260a66745caa1d7047b379a3335b7c04f5bf413a31e9fd6cfa2f934 not found: ID does not exist" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.345428 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.528780 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcxns\" (UniqueName: \"kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns\") pod \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.530421 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume\") pod \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.530555 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume\") pod \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\" (UID: \"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d\") " Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.532082 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume" (OuterVolumeSpecName: "config-volume") pod "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" (UID: "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.538521 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" (UID: "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.538959 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns" (OuterVolumeSpecName: "kube-api-access-kcxns") pod "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" (UID: "85d6cf37-bf3a-4d4d-bc1c-20600947ab8d"). InnerVolumeSpecName "kube-api-access-kcxns". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.634037 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.634383 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.634488 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcxns\" (UniqueName: \"kubernetes.io/projected/85d6cf37-bf3a-4d4d-bc1c-20600947ab8d-kube-api-access-kcxns\") on node \"crc\" DevicePath \"\"" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.992854 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" event={"ID":"85d6cf37-bf3a-4d4d-bc1c-20600947ab8d","Type":"ContainerDied","Data":"7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee"} Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.992903 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7febbbbc2258047b613c46632e3f5d00a741ec0b121653d8d52abb99c68fabee" Dec 11 10:30:03 crc kubenswrapper[4788]: I1211 10:30:03.992929 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424150-nm6bv" Dec 11 10:30:04 crc kubenswrapper[4788]: I1211 10:30:04.455399 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j"] Dec 11 10:30:04 crc kubenswrapper[4788]: I1211 10:30:04.466840 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424105-kzc6j"] Dec 11 10:30:04 crc kubenswrapper[4788]: I1211 10:30:04.509900 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51837483-7a31-4aeb-9854-7a317f91fd06" path="/var/lib/kubelet/pods/51837483-7a31-4aeb-9854-7a317f91fd06/volumes" Dec 11 10:30:04 crc kubenswrapper[4788]: I1211 10:30:04.510819 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58247a11-270b-494d-b7fb-4e5025060189" path="/var/lib/kubelet/pods/58247a11-270b-494d-b7fb-4e5025060189/volumes" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.469017 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65f8fcd946-pfr68_f4638f63-07df-47e5-942d-3061f2162f08/barbican-api/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.472759 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65f8fcd946-pfr68_f4638f63-07df-47e5-942d-3061f2162f08/barbican-api-log/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.668642 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fd98b95d6-wxbsw_5e2739c9-c97e-4807-bde7-172073652810/barbican-keystone-listener-log/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.689069 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fd98b95d6-wxbsw_5e2739c9-c97e-4807-bde7-172073652810/barbican-keystone-listener/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 
10:30:17.724493 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65644684d5-hmnmb_3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7/barbican-worker/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.890625 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65644684d5-hmnmb_3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7/barbican-worker-log/0.log" Dec 11 10:30:17 crc kubenswrapper[4788]: I1211 10:30:17.984018 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g_f6a40a8b-5427-40c5-b48b-18df0deb1e39/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.126437 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/ceilometer-notification-agent/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.175429 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/ceilometer-central-agent/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.253362 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/proxy-httpd/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.287727 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/sg-core/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.476563 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_300a8660-46c4-426b-b4a2-3f713fe639b2/cinder-api/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.498690 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_300a8660-46c4-426b-b4a2-3f713fe639b2/cinder-api-log/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.602243 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b29f4b83-e1ef-49cf-82eb-e7f080c7b28b/cinder-scheduler/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.713256 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b29f4b83-e1ef-49cf-82eb-e7f080c7b28b/probe/0.log" Dec 11 10:30:18 crc kubenswrapper[4788]: I1211 10:30:18.757854 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk_1203ac2a-acfa-4b1a-bba7-97eff5508d35/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:19 crc kubenswrapper[4788]: I1211 10:30:19.100135 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg_7eb3182b-8f0a-4c94-b59b-4d631cd5f52d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:19 crc kubenswrapper[4788]: I1211 10:30:19.107212 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/init/0.log" Dec 11 10:30:19 crc kubenswrapper[4788]: I1211 10:30:19.757613 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/init/0.log" Dec 11 10:30:19 crc kubenswrapper[4788]: I1211 10:30:19.814022 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/dnsmasq-dns/0.log" Dec 11 10:30:19 crc kubenswrapper[4788]: I1211 10:30:19.829921 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d_8cceb164-ffe1-4a11-83e9-888f72ad58f0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:20 crc kubenswrapper[4788]: I1211 10:30:20.009161 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_621ba590-fb77-4a71-a559-62c75a7f15dc/glance-log/0.log" Dec 11 10:30:20 crc kubenswrapper[4788]: I1211 10:30:20.046473 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_621ba590-fb77-4a71-a559-62c75a7f15dc/glance-httpd/0.log" Dec 11 10:30:20 crc kubenswrapper[4788]: I1211 10:30:20.224926 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_67b7dab2-3849-4249-99b5-63547063f12b/glance-httpd/0.log" Dec 11 10:30:20 crc kubenswrapper[4788]: I1211 10:30:20.293927 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_67b7dab2-3849-4249-99b5-63547063f12b/glance-log/0.log" Dec 11 10:30:20 crc kubenswrapper[4788]: I1211 10:30:20.415670 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-84d5c869dd-hzg6f_3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251/horizon/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.227379 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-cclp8_ab20916c-42ba-431e-af33-cf55f453378e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.252029 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-fkd9r_fdb8e800-98d6-4c2a-be3a-773e70a3dbff/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.270172 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29424121-g2d82_f1b95b41-dc96-4424-84a4-3c647a46ef4e/keystone-cron/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.368785 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.368846 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.446773 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-84d5c869dd-hzg6f_3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251/horizon-log/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.471652 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6679f5b3-4784-41d8-8475-fc65b77bb7c7/kube-state-metrics/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.537657 
4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c99b79967-dmp47_97f22e9d-3a9b-420e-a97d-0421c447bebb/keystone-api/0.log" Dec 11 10:30:21 crc kubenswrapper[4788]: I1211 10:30:21.747876 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8_20db1208-f411-4f0b-87da-e10fc9a8c4f9/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:22 crc kubenswrapper[4788]: I1211 10:30:22.040770 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-564d966fb9-4l95x_ac53e60f-bd33-417c-b606-cbe350b6597a/neutron-api/0.log" Dec 11 10:30:22 crc kubenswrapper[4788]: I1211 10:30:22.079637 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl_625e8b87-3138-45b9-935d-d26e22240053/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:22 crc kubenswrapper[4788]: I1211 10:30:22.124915 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-564d966fb9-4l95x_ac53e60f-bd33-417c-b606-cbe350b6597a/neutron-httpd/0.log" Dec 11 10:30:22 crc kubenswrapper[4788]: I1211 10:30:22.736490 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0d35e32a-7f87-4a69-9233-7d8bb40fec75/nova-api-log/0.log" Dec 11 10:30:22 crc kubenswrapper[4788]: I1211 10:30:22.766604 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_e0dba180-1bb0-4596-be23-66721a174129/nova-cell0-conductor-conductor/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.080389 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0d35e32a-7f87-4a69-9233-7d8bb40fec75/nova-api-api/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.156687 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a1fd7161-dba8-481a-946b-07baf45ffcdf/nova-cell1-novncproxy-novncproxy/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.156857 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d22ef65f-e312-488f-8607-514c224411e8/nova-cell1-conductor-conductor/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.476654 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-5mm6f_2d2d45a1-244d-4e91-be2b-db12eb484a25/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.612116 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d255cc24-14bd-4114-938b-c91acbe5c5d2/nova-metadata-log/0.log" Dec 11 10:30:23 crc kubenswrapper[4788]: I1211 10:30:23.977343 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b97d64be-b549-4abc-92a4-be155a300b1a/nova-scheduler-scheduler/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.076954 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/mysql-bootstrap/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.409357 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/galera/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.412528 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/mysql-bootstrap/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.650185 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/mysql-bootstrap/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.825739 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/mysql-bootstrap/0.log" Dec 11 10:30:24 crc kubenswrapper[4788]: I1211 10:30:24.882314 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/galera/0.log" Dec 11 10:30:25 crc kubenswrapper[4788]: I1211 10:30:25.145756 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-5hgrc_b899f552-09d4-4919-a3f1-79ff044cd435/ovn-controller/0.log" Dec 11 10:30:25 crc kubenswrapper[4788]: I1211 10:30:25.155899 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d255cc24-14bd-4114-938b-c91acbe5c5d2/nova-metadata-metadata/0.log" Dec 11 10:30:25 crc kubenswrapper[4788]: I1211 10:30:25.528881 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_4bae27c1-f73d-4bdb-91a2-185dd601bc33/openstackclient/0.log" Dec 11 10:30:25 crc kubenswrapper[4788]: I1211 10:30:25.685713 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-gpspm_f000044f-b0a7-417e-8278-5deb090b8105/openstack-network-exporter/0.log" Dec 11 10:30:25 crc kubenswrapper[4788]: I1211 10:30:25.799533 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server-init/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.025147 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server-init/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.047748 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovs-vswitchd/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.085391 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.353108 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb/openstack-network-exporter/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.373337 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-wh8qs_6de52d95-9e8d-402b-9fd3-3431a58a61e8/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.431511 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb/ovn-northd/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.633063 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3858a264-6dc8-4a58-8e80-3d57649da896/openstack-network-exporter/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.711502 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_3858a264-6dc8-4a58-8e80-3d57649da896/ovsdbserver-nb/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.893932 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_428e76c9-65ed-434c-a25d-6bcd956b48d5/openstack-network-exporter/0.log" Dec 11 10:30:26 crc kubenswrapper[4788]: I1211 10:30:26.976356 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_428e76c9-65ed-434c-a25d-6bcd956b48d5/ovsdbserver-sb/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.179569 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-564d895c68-qq26m_6e6612ef-ed63-43eb-a29c-a4dee4798be8/placement-api/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.310106 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-564d895c68-qq26m_6e6612ef-ed63-43eb-a29c-a4dee4798be8/placement-log/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.338199 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/setup-container/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.587600 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/setup-container/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.591513 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/rabbitmq/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.598176 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/setup-container/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.824360 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/setup-container/0.log" Dec 11 10:30:27 crc kubenswrapper[4788]: I1211 10:30:27.881014 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/rabbitmq/0.log" Dec 11 10:30:28 crc kubenswrapper[4788]: I1211 10:30:28.540271 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm_ecf372fc-dfed-418f-904f-8a2e485acbcd/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:28 crc kubenswrapper[4788]: I1211 10:30:28.708190 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-phwpb_3b286cdf-f5f9-4ad2-ab37-0e4697309be5/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:28 crc kubenswrapper[4788]: I1211 10:30:28.847110 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb_888d4b4c-ac58-4ac8-8c53-fa41a750aaef/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.169019 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-qwvh4_5459ff9c-bd04-47d1-ade7-e52983b8fc86/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.253334 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w29p9_bb7a8155-00dc-4dc1-9fc3-92417a6264ff/ssh-known-hosts-edpm-deployment/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.455483 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-d6d65bd59-g294c_c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5/proxy-server/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.572170 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-d6d65bd59-g294c_c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5/proxy-httpd/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.631950 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m5bdb_6f5299be-0ac1-4048-b2aa-6a07ce5c30e1/swift-ring-rebalance/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.770171 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-auditor/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.810480 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-reaper/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.882595 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-replicator/0.log" Dec 11 10:30:29 crc kubenswrapper[4788]: I1211 10:30:29.947381 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-server/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.003442 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-auditor/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.075456 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-replicator/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.764000 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-server/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.812057 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-updater/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.816770 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-expirer/0.log" Dec 11 10:30:30 crc kubenswrapper[4788]: I1211 10:30:30.819487 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-auditor/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.016618 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-replicator/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.085650 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-updater/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.096025 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-server/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.129993 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/rsync/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.262431 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/swift-recon-cron/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.480915 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7_5395fbbe-5f31-4c60-bee6-09b492d13e36/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.532627 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5cf588e9-b8ff-4862-9a71-8e6805d89b2a/tempest-tests-tempest-tests-runner/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.670426 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_c7921196-c38d-467e-b733-20416c265305/test-operator-logs-container/0.log" Dec 11 10:30:31 crc kubenswrapper[4788]: I1211 10:30:31.780024 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-g8htq_f49b48e9-71fc-4a17-9cf2-c5831649bda2/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:30:39 crc kubenswrapper[4788]: I1211 10:30:39.762621 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="2552ec58-e76a-4c17-ab79-ac237c6d972c" containerName="galera" probeResult="failure" output="command timed out" Dec 11 10:30:41 crc kubenswrapper[4788]: I1211 10:30:41.143431 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_eba9b8bc-1fe1-4ba9-9521-a21c25bed6be/memcached/0.log" Dec 11 10:30:47 crc kubenswrapper[4788]: I1211 10:30:47.587546 4788 scope.go:117] "RemoveContainer" containerID="09fab16fa9a14e119cf6a9e1f76aa4ec2cfbb312782ff53cc1b4ef24bae91946" Dec 11 10:30:51 crc kubenswrapper[4788]: I1211 10:30:51.369626 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:30:51 crc kubenswrapper[4788]: I1211 10:30:51.370301 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.055054 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-nnspt_03f510d3-616e-454c-9086-687604b0cff1/kube-rbac-proxy/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.146198 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-nnspt_03f510d3-616e-454c-9086-687604b0cff1/manager/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 
10:30:58.267580 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-qr4bz_f2525214-ff81-4638-baa5-afcd178f9ec6/kube-rbac-proxy/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.345197 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-qr4bz_f2525214-ff81-4638-baa5-afcd178f9ec6/manager/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.476342 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-688sb_cb780059-66e2-48f4-913b-271489226ef9/kube-rbac-proxy/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.477111 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-688sb_cb780059-66e2-48f4-913b-271489226ef9/manager/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.611613 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.820643 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.856351 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:30:58 crc kubenswrapper[4788]: I1211 10:30:58.880617 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.016601 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.054341 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.086675 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/extract/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.236750 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-kdjld_684c9c50-d818-41d8-852d-82f5937c18ab/kube-rbac-proxy/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.315906 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-kdjld_684c9c50-d818-41d8-852d-82f5937c18ab/manager/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.395922 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-zr4vh_0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795/kube-rbac-proxy/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.471450 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-zr4vh_0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795/manager/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.542072 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-p9v6z_348b99db-d5ca-41e2-b2a0-f22f6aeca6b0/kube-rbac-proxy/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.694006 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-p9v6z_348b99db-d5ca-41e2-b2a0-f22f6aeca6b0/manager/0.log" Dec 11 10:30:59 crc kubenswrapper[4788]: I1211 10:30:59.804999 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-b7lzd_2868074d-eb62-4d8a-b275-047d72fec830/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.017194 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-vzs5b_7707f72a-2719-46de-8409-b8d397a4ce03/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.021313 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-b7lzd_2868074d-eb62-4d8a-b275-047d72fec830/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.057554 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-vzs5b_7707f72a-2719-46de-8409-b8d397a4ce03/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.194280 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mpztw_bf947be7-c3ef-4ae6-beff-11d5ae6d1f94/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.288775 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mpztw_bf947be7-c3ef-4ae6-beff-11d5ae6d1f94/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.396784 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-lcx6f_354b87a3-d193-427f-8620-f7fcb52acb67/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.413835 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-lcx6f_354b87a3-d193-427f-8620-f7fcb52acb67/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.479197 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-dmnk5_d78d8bad-e298-41b5-82fa-d4cf464d28dd/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.614757 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-dmnk5_d78d8bad-e298-41b5-82fa-d4cf464d28dd/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.656892 4788 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hlfq2_f7a4db33-474d-496e-b745-939ce842904d/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.760122 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hlfq2_f7a4db33-474d-496e-b745-939ce842904d/manager/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.878278 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pkxtw_5fa7cb98-c29a-4efb-81ff-710523478ec0/kube-rbac-proxy/0.log" Dec 11 10:31:00 crc kubenswrapper[4788]: I1211 10:31:00.926053 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pkxtw_5fa7cb98-c29a-4efb-81ff-710523478ec0/manager/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.043718 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-brq4g_556f2b13-91d6-4261-9e7a-bed452e436eb/kube-rbac-proxy/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.097689 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-brq4g_556f2b13-91d6-4261-9e7a-bed452e436eb/manager/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.256748 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879ff2lw9_6491a596-c46a-45c7-9430-4d9f6a40a6d2/kube-rbac-proxy/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.302218 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879ff2lw9_6491a596-c46a-45c7-9430-4d9f6a40a6d2/manager/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.711024 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8g2bh_78fdcbf0-ff20-43b8-bf9a-ded097de063c/registry-server/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.772257 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dcd4874fd-59fm4_3ad7c2ee-56fc-41ad-9f63-2697aa291948/operator/0.log" Dec 11 10:31:01 crc kubenswrapper[4788]: I1211 10:31:01.960384 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zqgjl_c0d61f31-e8b5-454d-8961-cedc33a2efa2/kube-rbac-proxy/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.198451 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-gz22l_f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d/kube-rbac-proxy/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.266176 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zqgjl_c0d61f31-e8b5-454d-8961-cedc33a2efa2/manager/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.435939 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-gz22l_f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d/manager/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.489139 4788 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-gbfnb_67082483-5ed3-4141-a7be-d3f95f5b07c4/operator/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.642253 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-65d64bdc49-5hs5g_f8ee8e1d-0519-4464-9ca1-17d37770dcdc/manager/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.695737 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-v87tj_7e94e3ad-b1bf-44e3-aa17-52380cb0e651/kube-rbac-proxy/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.715641 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-v87tj_7e94e3ad-b1bf-44e3-aa17-52380cb0e651/manager/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.754973 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-sxb22_f6e78ec3-ccc1-48b2-8ba6-962de2a25249/kube-rbac-proxy/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.921437 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-sxb22_f6e78ec3-ccc1-48b2-8ba6-962de2a25249/manager/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.927220 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-z7q4w_726d9db1-d370-4bea-b91d-6beff7ba4b6b/kube-rbac-proxy/0.log" Dec 11 10:31:02 crc kubenswrapper[4788]: I1211 10:31:02.959660 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-z7q4w_726d9db1-d370-4bea-b91d-6beff7ba4b6b/manager/0.log" Dec 11 10:31:03 crc kubenswrapper[4788]: I1211 10:31:03.109291 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-kggr7_ce133f4a-b1fd-4e51-8e4f-390d6f125e1d/kube-rbac-proxy/0.log" Dec 11 10:31:03 crc kubenswrapper[4788]: I1211 10:31:03.116886 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-kggr7_ce133f4a-b1fd-4e51-8e4f-390d6f125e1d/manager/0.log" Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.369162 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.371091 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.371278 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.371938 4788 kuberuntime_manager.go:1027] "Message for Container of 
pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.372039 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38" gracePeriod=600 Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.835117 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38" exitCode=0 Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.835153 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38"} Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.835539 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c"} Dec 11 10:31:21 crc kubenswrapper[4788]: I1211 10:31:21.835573 4788 scope.go:117] "RemoveContainer" containerID="11fe4623717a1cf76467467e9b694c50ea2a5290c82a2df955c87d179ceeb660" Dec 11 10:31:22 crc kubenswrapper[4788]: I1211 10:31:22.923422 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5ztq4_b39d101c-fbdd-427c-9369-cbfde9bb50cd/control-plane-machine-set-operator/0.log" Dec 11 10:31:23 crc kubenswrapper[4788]: I1211 10:31:23.088390 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-47nzd_0783fcec-bf1b-4910-b8c2-08d85c53093a/machine-api-operator/0.log" Dec 11 10:31:23 crc kubenswrapper[4788]: I1211 10:31:23.138217 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-47nzd_0783fcec-bf1b-4910-b8c2-08d85c53093a/kube-rbac-proxy/0.log" Dec 11 10:31:36 crc kubenswrapper[4788]: I1211 10:31:36.564150 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hq8gs_c6917182-a497-4802-8747-4a6c3e78a11f/cert-manager-controller/0.log" Dec 11 10:31:36 crc kubenswrapper[4788]: I1211 10:31:36.744850 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-t6sfj_0493623c-4b7f-4f28-a1bb-9303b031d9a0/cert-manager-cainjector/0.log" Dec 11 10:31:36 crc kubenswrapper[4788]: I1211 10:31:36.813991 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-697m4_dea5ca68-6922-46e4-81ed-8c917c670214/cert-manager-webhook/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.408446 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-z27wt_06c736a4-1288-473a-bceb-0951aced851f/nmstate-console-plugin/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.560419 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-b6mlg_88b842a1-c94f-4a0a-b845-d5330f12a0a1/nmstate-handler/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.608776 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fprr4_49a187f1-90cf-4afb-9dec-10bf57b8ff69/kube-rbac-proxy/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.651750 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fprr4_49a187f1-90cf-4afb-9dec-10bf57b8ff69/nmstate-metrics/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.785398 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-8dw7z_0f1f9ffb-58ec-4282-b3a1-d9040b09023b/nmstate-operator/0.log" Dec 11 10:31:49 crc kubenswrapper[4788]: I1211 10:31:49.867844 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-4vbrf_5baf07b9-7c94-4c71-99ee-37b2e68d0437/nmstate-webhook/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.214685 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-82svd_ac39e555-61a7-48d2-976e-e3a095bae216/kube-rbac-proxy/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.342828 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-82svd_ac39e555-61a7-48d2-976e-e3a095bae216/controller/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.492221 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.624415 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.656614 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.665171 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.674552 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.924133 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.931938 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.933513 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:32:06 crc kubenswrapper[4788]: I1211 10:32:06.977034 4788 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.147790 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.151088 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.155350 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.225734 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/controller/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.375562 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/kube-rbac-proxy/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.382389 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/frr-metrics/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.482542 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/kube-rbac-proxy-frr/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.668632 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/reloader/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.722443 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-fc5pw_1ed60fe2-ac5e-46b3-a0f1-05436db532bb/frr-k8s-webhook-server/0.log" Dec 11 10:32:07 crc kubenswrapper[4788]: I1211 10:32:07.925565 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64ccd66d65-8ldsp_d17f23e5-47b0-4b87-ab5c-32ac870eb738/manager/0.log" Dec 11 10:32:08 crc kubenswrapper[4788]: I1211 10:32:08.139123 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-57bc7764b6-mf4wf_2fec7bb6-d596-4da7-94d0-567cb78c94b2/webhook-server/0.log" Dec 11 10:32:08 crc kubenswrapper[4788]: I1211 10:32:08.203282 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wfdvw_9edef12f-0a1f-45ad-8850-0d2edfc5384c/kube-rbac-proxy/0.log" Dec 11 10:32:08 crc kubenswrapper[4788]: I1211 10:32:08.789644 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/frr/0.log" Dec 11 10:32:08 crc kubenswrapper[4788]: I1211 10:32:08.845860 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wfdvw_9edef12f-0a1f-45ad-8850-0d2edfc5384c/speaker/0.log" Dec 11 10:32:22 crc kubenswrapper[4788]: I1211 10:32:22.907183 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.010538 4788 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.041187 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.082413 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.260242 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.290446 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/extract/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.299808 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.516644 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.676288 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.773673 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.783732 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:32:23 crc kubenswrapper[4788]: I1211 10:32:23.974793 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.004063 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.012962 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/extract/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.168984 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.407829 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.434649 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:32:24 crc kubenswrapper[4788]: I1211 10:32:24.435776 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.226922 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.358106 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.508717 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/registry-server/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.550092 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.741993 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.824643 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:32:25 crc kubenswrapper[4788]: I1211 10:32:25.861047 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:32:26 crc kubenswrapper[4788]: I1211 10:32:26.010807 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:32:26 crc kubenswrapper[4788]: I1211 10:32:26.032053 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:32:26 crc kubenswrapper[4788]: I1211 10:32:26.285443 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-qdm6m_3406c148-fa4c-403c-bf11-02f53cf14170/marketplace-operator/0.log" Dec 11 10:32:26 crc kubenswrapper[4788]: I1211 10:32:26.382970 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/registry-server/0.log" Dec 11 10:32:26 crc kubenswrapper[4788]: I1211 10:32:26.618030 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.428304 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.440490 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.472066 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.675978 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.681140 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.745290 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.869705 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/registry-server/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.939326 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:32:27 crc kubenswrapper[4788]: I1211 10:32:27.988767 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:32:28 crc kubenswrapper[4788]: I1211 10:32:28.003825 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:32:28 crc kubenswrapper[4788]: I1211 10:32:28.189040 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:32:28 crc kubenswrapper[4788]: I1211 10:32:28.211453 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:32:28 crc kubenswrapper[4788]: I1211 10:32:28.810678 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/registry-server/0.log" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.298138 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:35 crc kubenswrapper[4788]: E1211 10:32:35.299361 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="extract-utilities" Dec 11 
10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299380 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="extract-utilities" Dec 11 10:32:35 crc kubenswrapper[4788]: E1211 10:32:35.299412 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" containerName="collect-profiles" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299419 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" containerName="collect-profiles" Dec 11 10:32:35 crc kubenswrapper[4788]: E1211 10:32:35.299431 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="registry-server" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299441 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="registry-server" Dec 11 10:32:35 crc kubenswrapper[4788]: E1211 10:32:35.299464 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="extract-content" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299473 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="extract-content" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299691 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="85d6cf37-bf3a-4d4d-bc1c-20600947ab8d" containerName="collect-profiles" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.299705 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="58247a11-270b-494d-b7fb-4e5025060189" containerName="registry-server" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.305520 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.314020 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.412655 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.412882 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsdbt\" (UniqueName: \"kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.413070 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.515159 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsdbt\" (UniqueName: \"kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.515595 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.515664 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.516204 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:35 crc kubenswrapper[4788]: I1211 10:32:35.516888 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:36 crc kubenswrapper[4788]: I1211 10:32:36.357904 4788 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fsdbt\" (UniqueName: \"kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt\") pod \"community-operators-m7djn\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:36 crc kubenswrapper[4788]: I1211 10:32:36.524057 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:36 crc kubenswrapper[4788]: I1211 10:32:36.974686 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:37 crc kubenswrapper[4788]: I1211 10:32:37.537426 4788 generic.go:334] "Generic (PLEG): container finished" podID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerID="73f6992db0798fb0d112eb902332a47e5f612a7a2cbb4f085dd1b3b344c74f40" exitCode=0 Dec 11 10:32:37 crc kubenswrapper[4788]: I1211 10:32:37.537484 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerDied","Data":"73f6992db0798fb0d112eb902332a47e5f612a7a2cbb4f085dd1b3b344c74f40"} Dec 11 10:32:37 crc kubenswrapper[4788]: I1211 10:32:37.537517 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerStarted","Data":"cca99d800f7ee9f03666c82851cb8fa33a1591fc7621c3ccffb257773011422c"} Dec 11 10:32:37 crc kubenswrapper[4788]: I1211 10:32:37.539613 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:32:38 crc kubenswrapper[4788]: I1211 10:32:38.548120 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerStarted","Data":"6c5408b3fcae49c9b6311170c54a181cbd80bca9b617b2e4576c5f2c47881eea"} Dec 11 10:32:39 crc kubenswrapper[4788]: I1211 10:32:39.558383 4788 generic.go:334] "Generic (PLEG): container finished" podID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerID="6c5408b3fcae49c9b6311170c54a181cbd80bca9b617b2e4576c5f2c47881eea" exitCode=0 Dec 11 10:32:39 crc kubenswrapper[4788]: I1211 10:32:39.558456 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerDied","Data":"6c5408b3fcae49c9b6311170c54a181cbd80bca9b617b2e4576c5f2c47881eea"} Dec 11 10:32:40 crc kubenswrapper[4788]: I1211 10:32:40.569214 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerStarted","Data":"7d4201392dde9a241786a24048576a788cd51392141119a696b504af62ccc32a"} Dec 11 10:32:40 crc kubenswrapper[4788]: I1211 10:32:40.622455 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m7djn" podStartSLOduration=2.936371148 podStartE2EDuration="5.622423796s" podCreationTimestamp="2025-12-11 10:32:35 +0000 UTC" firstStartedPulling="2025-12-11 10:32:37.539356914 +0000 UTC m=+4287.610136500" lastFinishedPulling="2025-12-11 10:32:40.225409562 +0000 UTC m=+4290.296189148" observedRunningTime="2025-12-11 10:32:40.586551348 +0000 UTC m=+4290.657330934" watchObservedRunningTime="2025-12-11 
10:32:40.622423796 +0000 UTC m=+4290.693203392" Dec 11 10:32:46 crc kubenswrapper[4788]: I1211 10:32:46.524270 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:46 crc kubenswrapper[4788]: I1211 10:32:46.524870 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:46 crc kubenswrapper[4788]: I1211 10:32:46.628490 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:46 crc kubenswrapper[4788]: I1211 10:32:46.701168 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:46 crc kubenswrapper[4788]: I1211 10:32:46.870838 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:48 crc kubenswrapper[4788]: I1211 10:32:48.650659 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m7djn" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="registry-server" containerID="cri-o://7d4201392dde9a241786a24048576a788cd51392141119a696b504af62ccc32a" gracePeriod=2 Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.665932 4788 generic.go:334] "Generic (PLEG): container finished" podID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerID="7d4201392dde9a241786a24048576a788cd51392141119a696b504af62ccc32a" exitCode=0 Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.666281 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerDied","Data":"7d4201392dde9a241786a24048576a788cd51392141119a696b504af62ccc32a"} Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.666314 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m7djn" event={"ID":"3b4d636e-0726-4baf-8ae3-4867685d49bd","Type":"ContainerDied","Data":"cca99d800f7ee9f03666c82851cb8fa33a1591fc7621c3ccffb257773011422c"} Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.666327 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cca99d800f7ee9f03666c82851cb8fa33a1591fc7621c3ccffb257773011422c" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.733321 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.863736 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content\") pod \"3b4d636e-0726-4baf-8ae3-4867685d49bd\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.863896 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities\") pod \"3b4d636e-0726-4baf-8ae3-4867685d49bd\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.863931 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsdbt\" (UniqueName: \"kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt\") pod \"3b4d636e-0726-4baf-8ae3-4867685d49bd\" (UID: \"3b4d636e-0726-4baf-8ae3-4867685d49bd\") " Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.864714 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities" (OuterVolumeSpecName: "utilities") pod "3b4d636e-0726-4baf-8ae3-4867685d49bd" (UID: "3b4d636e-0726-4baf-8ae3-4867685d49bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.882297 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt" (OuterVolumeSpecName: "kube-api-access-fsdbt") pod "3b4d636e-0726-4baf-8ae3-4867685d49bd" (UID: "3b4d636e-0726-4baf-8ae3-4867685d49bd"). InnerVolumeSpecName "kube-api-access-fsdbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.919396 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b4d636e-0726-4baf-8ae3-4867685d49bd" (UID: "3b4d636e-0726-4baf-8ae3-4867685d49bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.967130 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.967175 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsdbt\" (UniqueName: \"kubernetes.io/projected/3b4d636e-0726-4baf-8ae3-4867685d49bd-kube-api-access-fsdbt\") on node \"crc\" DevicePath \"\"" Dec 11 10:32:49 crc kubenswrapper[4788]: I1211 10:32:49.967190 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b4d636e-0726-4baf-8ae3-4867685d49bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:32:50 crc kubenswrapper[4788]: I1211 10:32:50.681057 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m7djn" Dec 11 10:32:50 crc kubenswrapper[4788]: I1211 10:32:50.732741 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:50 crc kubenswrapper[4788]: I1211 10:32:50.742113 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m7djn"] Dec 11 10:32:52 crc kubenswrapper[4788]: I1211 10:32:52.507113 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" path="/var/lib/kubelet/pods/3b4d636e-0726-4baf-8ae3-4867685d49bd/volumes" Dec 11 10:33:21 crc kubenswrapper[4788]: I1211 10:33:21.369209 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:33:21 crc kubenswrapper[4788]: I1211 10:33:21.369798 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:33:51 crc kubenswrapper[4788]: I1211 10:33:51.368853 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:33:51 crc kubenswrapper[4788]: I1211 10:33:51.369381 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:34:20 crc kubenswrapper[4788]: I1211 10:34:20.625726 4788 generic.go:334] "Generic (PLEG): container finished" podID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerID="2f2477e7db9e9747925d69488506f9784eea0c158d33b6cf791a419a57d4b68e" exitCode=0 Dec 11 10:34:20 crc kubenswrapper[4788]: I1211 10:34:20.625816 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" event={"ID":"45555a47-8e19-4bcd-8ec0-44e682b2d81c","Type":"ContainerDied","Data":"2f2477e7db9e9747925d69488506f9784eea0c158d33b6cf791a419a57d4b68e"} Dec 11 10:34:20 crc kubenswrapper[4788]: I1211 10:34:20.626927 4788 scope.go:117] "RemoveContainer" containerID="2f2477e7db9e9747925d69488506f9784eea0c158d33b6cf791a419a57d4b68e" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:20.999965 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlbjk_must-gather-jrgr8_45555a47-8e19-4bcd-8ec0-44e682b2d81c/gather/0.log" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.368945 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 
10:34:21.369010 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.369236 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.370121 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.370182 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" gracePeriod=600 Dec 11 10:34:21 crc kubenswrapper[4788]: E1211 10:34:21.489176 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.638985 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" exitCode=0 Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.639030 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c"} Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.639068 4788 scope.go:117] "RemoveContainer" containerID="875931243e5fca3f1d8e6275cbb949ff735c64d729ac906beafebb86ab24bc38" Dec 11 10:34:21 crc kubenswrapper[4788]: I1211 10:34:21.639857 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:34:21 crc kubenswrapper[4788]: E1211 10:34:21.640161 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.506393 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nlbjk/must-gather-jrgr8"] Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.507171 4788 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="copy" containerID="cri-o://72719bd7cc910eb6548c34d63a30b0c10c8731a159265ce187e49d52b903ea3a" gracePeriod=2 Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.515803 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nlbjk/must-gather-jrgr8"] Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.925153 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlbjk_must-gather-jrgr8_45555a47-8e19-4bcd-8ec0-44e682b2d81c/copy/0.log" Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.926621 4788 generic.go:334] "Generic (PLEG): container finished" podID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerID="72719bd7cc910eb6548c34d63a30b0c10c8731a159265ce187e49d52b903ea3a" exitCode=143 Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.926762 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c1a594898b115514b1639695a030112d08bf0eafde8877eee0bca38fb2b860c" Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.961618 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nlbjk_must-gather-jrgr8_45555a47-8e19-4bcd-8ec0-44e682b2d81c/copy/0.log" Dec 11 10:34:28 crc kubenswrapper[4788]: I1211 10:34:28.962136 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.035396 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output\") pod \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.035520 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zcgx\" (UniqueName: \"kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx\") pod \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\" (UID: \"45555a47-8e19-4bcd-8ec0-44e682b2d81c\") " Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.043307 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx" (OuterVolumeSpecName: "kube-api-access-7zcgx") pod "45555a47-8e19-4bcd-8ec0-44e682b2d81c" (UID: "45555a47-8e19-4bcd-8ec0-44e682b2d81c"). InnerVolumeSpecName "kube-api-access-7zcgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.137836 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zcgx\" (UniqueName: \"kubernetes.io/projected/45555a47-8e19-4bcd-8ec0-44e682b2d81c-kube-api-access-7zcgx\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.190064 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "45555a47-8e19-4bcd-8ec0-44e682b2d81c" (UID: "45555a47-8e19-4bcd-8ec0-44e682b2d81c"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.240037 4788 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/45555a47-8e19-4bcd-8ec0-44e682b2d81c-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 11 10:34:29 crc kubenswrapper[4788]: I1211 10:34:29.939546 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nlbjk/must-gather-jrgr8" Dec 11 10:34:30 crc kubenswrapper[4788]: I1211 10:34:30.511900 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" path="/var/lib/kubelet/pods/45555a47-8e19-4bcd-8ec0-44e682b2d81c/volumes" Dec 11 10:34:36 crc kubenswrapper[4788]: I1211 10:34:36.495899 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:34:36 crc kubenswrapper[4788]: E1211 10:34:36.496670 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:34:47 crc kubenswrapper[4788]: I1211 10:34:47.752312 4788 scope.go:117] "RemoveContainer" containerID="2f2477e7db9e9747925d69488506f9784eea0c158d33b6cf791a419a57d4b68e" Dec 11 10:34:47 crc kubenswrapper[4788]: I1211 10:34:47.818320 4788 scope.go:117] "RemoveContainer" containerID="72719bd7cc910eb6548c34d63a30b0c10c8731a159265ce187e49d52b903ea3a" Dec 11 10:34:51 crc kubenswrapper[4788]: I1211 10:34:51.496712 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:34:51 crc kubenswrapper[4788]: E1211 10:34:51.498844 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:35:05 crc kubenswrapper[4788]: I1211 10:35:05.496006 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:35:05 crc kubenswrapper[4788]: E1211 10:35:05.496890 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:35:19 crc kubenswrapper[4788]: I1211 10:35:19.495662 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:35:19 crc kubenswrapper[4788]: E1211 10:35:19.496491 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:35:33 crc kubenswrapper[4788]: I1211 10:35:33.496630 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:35:33 crc kubenswrapper[4788]: E1211 10:35:33.498505 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:35:47 crc kubenswrapper[4788]: I1211 10:35:47.496761 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:35:47 crc kubenswrapper[4788]: E1211 10:35:47.497595 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:35:47 crc kubenswrapper[4788]: I1211 10:35:47.862681 4788 scope.go:117] "RemoveContainer" containerID="40d635e36505a9281f136e8a51c3854e90b968a4f76a1b576a2320449e31f783" Dec 11 10:36:01 crc kubenswrapper[4788]: I1211 10:36:01.496773 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:36:01 crc kubenswrapper[4788]: E1211 10:36:01.498677 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:36:16 crc kubenswrapper[4788]: I1211 10:36:16.496326 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:36:16 crc kubenswrapper[4788]: E1211 10:36:16.498742 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:36:27 crc kubenswrapper[4788]: I1211 10:36:27.495822 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:36:27 crc kubenswrapper[4788]: E1211 10:36:27.496836 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:36:41 crc kubenswrapper[4788]: I1211 10:36:41.496826 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:36:41 crc kubenswrapper[4788]: E1211 10:36:41.498493 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.891930 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:36:46 crc kubenswrapper[4788]: E1211 10:36:46.892903 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="copy" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.892919 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="copy" Dec 11 10:36:46 crc kubenswrapper[4788]: E1211 10:36:46.892950 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="gather" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.892955 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="gather" Dec 11 10:36:46 crc kubenswrapper[4788]: E1211 10:36:46.892970 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="registry-server" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.892976 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="registry-server" Dec 11 10:36:46 crc kubenswrapper[4788]: E1211 10:36:46.892984 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="extract-content" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.892989 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="extract-content" Dec 11 10:36:46 crc kubenswrapper[4788]: E1211 10:36:46.893002 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="extract-utilities" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.893008 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="extract-utilities" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.893179 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="gather" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.893204 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="45555a47-8e19-4bcd-8ec0-44e682b2d81c" containerName="copy" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.893216 4788 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="3b4d636e-0726-4baf-8ae3-4867685d49bd" containerName="registry-server" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.894649 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:46 crc kubenswrapper[4788]: I1211 10:36:46.907256 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.040009 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.040745 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wq6j\" (UniqueName: \"kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.040887 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.142843 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.142929 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wq6j\" (UniqueName: \"kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.142962 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.143502 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.143504 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities\") pod \"certified-operators-6zlwc\" 
(UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.168293 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wq6j\" (UniqueName: \"kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j\") pod \"certified-operators-6zlwc\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.217762 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.736963 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:36:47 crc kubenswrapper[4788]: I1211 10:36:47.938689 4788 scope.go:117] "RemoveContainer" containerID="288426521a956b3167bca92806dad6d5f5ed6bbf81b1fdaecae995c035cfd9de" Dec 11 10:36:48 crc kubenswrapper[4788]: I1211 10:36:48.225816 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerStarted","Data":"9dc8bca20edb6bf0409a26c21846814fa6459de0201a0f80bc4cd4c408db1e03"} Dec 11 10:36:49 crc kubenswrapper[4788]: I1211 10:36:49.235818 4788 generic.go:334] "Generic (PLEG): container finished" podID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerID="5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f" exitCode=0 Dec 11 10:36:49 crc kubenswrapper[4788]: I1211 10:36:49.235867 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerDied","Data":"5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f"} Dec 11 10:36:51 crc kubenswrapper[4788]: I1211 10:36:51.261071 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerStarted","Data":"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e"} Dec 11 10:36:52 crc kubenswrapper[4788]: I1211 10:36:52.274210 4788 generic.go:334] "Generic (PLEG): container finished" podID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerID="adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e" exitCode=0 Dec 11 10:36:52 crc kubenswrapper[4788]: I1211 10:36:52.274267 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerDied","Data":"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e"} Dec 11 10:36:53 crc kubenswrapper[4788]: I1211 10:36:53.286208 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerStarted","Data":"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3"} Dec 11 10:36:53 crc kubenswrapper[4788]: I1211 10:36:53.308147 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6zlwc" podStartSLOduration=3.68456998 podStartE2EDuration="7.308121233s" podCreationTimestamp="2025-12-11 10:36:46 +0000 UTC" firstStartedPulling="2025-12-11 10:36:49.239347097 +0000 UTC 
m=+4539.310126683" lastFinishedPulling="2025-12-11 10:36:52.86289836 +0000 UTC m=+4542.933677936" observedRunningTime="2025-12-11 10:36:53.30248986 +0000 UTC m=+4543.373269476" watchObservedRunningTime="2025-12-11 10:36:53.308121233 +0000 UTC m=+4543.378900819" Dec 11 10:36:54 crc kubenswrapper[4788]: I1211 10:36:54.495948 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:36:54 crc kubenswrapper[4788]: E1211 10:36:54.496566 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:36:57 crc kubenswrapper[4788]: I1211 10:36:57.218405 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:57 crc kubenswrapper[4788]: I1211 10:36:57.219915 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:36:57 crc kubenswrapper[4788]: I1211 10:36:57.267705 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.274322 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.330486 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.419305 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6zlwc" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="registry-server" containerID="cri-o://c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3" gracePeriod=2 Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.846487 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.989977 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wq6j\" (UniqueName: \"kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j\") pod \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.990177 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities\") pod \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.990251 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content\") pod \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\" (UID: \"a6f43171-c2e6-4b66-8e52-d09bc5285c61\") " Dec 11 10:37:07 crc kubenswrapper[4788]: I1211 10:37:07.992037 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities" (OuterVolumeSpecName: "utilities") pod "a6f43171-c2e6-4b66-8e52-d09bc5285c61" (UID: "a6f43171-c2e6-4b66-8e52-d09bc5285c61"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.004156 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j" (OuterVolumeSpecName: "kube-api-access-6wq6j") pod "a6f43171-c2e6-4b66-8e52-d09bc5285c61" (UID: "a6f43171-c2e6-4b66-8e52-d09bc5285c61"). InnerVolumeSpecName "kube-api-access-6wq6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.053110 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6f43171-c2e6-4b66-8e52-d09bc5285c61" (UID: "a6f43171-c2e6-4b66-8e52-d09bc5285c61"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.092131 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.092201 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wq6j\" (UniqueName: \"kubernetes.io/projected/a6f43171-c2e6-4b66-8e52-d09bc5285c61-kube-api-access-6wq6j\") on node \"crc\" DevicePath \"\"" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.092214 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6f43171-c2e6-4b66-8e52-d09bc5285c61-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.436386 4788 generic.go:334] "Generic (PLEG): container finished" podID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerID="c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3" exitCode=0 Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.436447 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerDied","Data":"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3"} Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.436466 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6zlwc" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.436499 4788 scope.go:117] "RemoveContainer" containerID="c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.436484 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6zlwc" event={"ID":"a6f43171-c2e6-4b66-8e52-d09bc5285c61","Type":"ContainerDied","Data":"9dc8bca20edb6bf0409a26c21846814fa6459de0201a0f80bc4cd4c408db1e03"} Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.458162 4788 scope.go:117] "RemoveContainer" containerID="adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.476145 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.488405 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6zlwc"] Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.494566 4788 scope.go:117] "RemoveContainer" containerID="5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.495676 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:37:08 crc kubenswrapper[4788]: E1211 10:37:08.496187 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:37:08 crc 
kubenswrapper[4788]: I1211 10:37:08.539407 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" path="/var/lib/kubelet/pods/a6f43171-c2e6-4b66-8e52-d09bc5285c61/volumes" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.541486 4788 scope.go:117] "RemoveContainer" containerID="c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3" Dec 11 10:37:08 crc kubenswrapper[4788]: E1211 10:37:08.542020 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3\": container with ID starting with c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3 not found: ID does not exist" containerID="c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.542062 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3"} err="failed to get container status \"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3\": rpc error: code = NotFound desc = could not find container \"c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3\": container with ID starting with c0f9621746911824971c0c5fc10d5c9394b26fc57485a396aac068cca35286a3 not found: ID does not exist" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.542088 4788 scope.go:117] "RemoveContainer" containerID="adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e" Dec 11 10:37:08 crc kubenswrapper[4788]: E1211 10:37:08.542515 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e\": container with ID starting with adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e not found: ID does not exist" containerID="adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.542542 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e"} err="failed to get container status \"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e\": rpc error: code = NotFound desc = could not find container \"adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e\": container with ID starting with adeba15f856f4a7d10f8be5915c27bff0e8c492c2acdb26634966100d988f27e not found: ID does not exist" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.542556 4788 scope.go:117] "RemoveContainer" containerID="5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f" Dec 11 10:37:08 crc kubenswrapper[4788]: E1211 10:37:08.543212 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f\": container with ID starting with 5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f not found: ID does not exist" containerID="5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f" Dec 11 10:37:08 crc kubenswrapper[4788]: I1211 10:37:08.543303 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f"} err="failed to get container status \"5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f\": rpc error: code = NotFound desc = could not find container \"5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f\": container with ID starting with 5f86d10ee62950f276a2ee24e23fbb7e8730e7dc93fb9b82562895f7dd3dfa2f not found: ID does not exist" Dec 11 10:37:08 crc kubenswrapper[4788]: E1211 10:37:08.632680 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6f43171_c2e6_4b66_8e52_d09bc5285c61.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6f43171_c2e6_4b66_8e52_d09bc5285c61.slice/crio-9dc8bca20edb6bf0409a26c21846814fa6459de0201a0f80bc4cd4c408db1e03\": RecentStats: unable to find data in memory cache]" Dec 11 10:37:23 crc kubenswrapper[4788]: I1211 10:37:23.495486 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:37:23 crc kubenswrapper[4788]: E1211 10:37:23.496293 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.125364 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-74tc8/must-gather-p6r8v"] Dec 11 10:37:24 crc kubenswrapper[4788]: E1211 10:37:24.126178 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="registry-server" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.126199 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="registry-server" Dec 11 10:37:24 crc kubenswrapper[4788]: E1211 10:37:24.126214 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="extract-utilities" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.126222 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="extract-utilities" Dec 11 10:37:24 crc kubenswrapper[4788]: E1211 10:37:24.126266 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="extract-content" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.126276 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="extract-content" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.126668 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6f43171-c2e6-4b66-8e52-d09bc5285c61" containerName="registry-server" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.128059 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.134702 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-74tc8"/"kube-root-ca.crt" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.134984 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-74tc8"/"openshift-service-ca.crt" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.145186 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-74tc8/must-gather-p6r8v"] Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.269379 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg4ht\" (UniqueName: \"kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.269841 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.372081 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.372217 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg4ht\" (UniqueName: \"kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.372781 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.396323 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg4ht\" (UniqueName: \"kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht\") pod \"must-gather-p6r8v\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.448891 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:37:24 crc kubenswrapper[4788]: I1211 10:37:24.898430 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-74tc8/must-gather-p6r8v"] Dec 11 10:37:25 crc kubenswrapper[4788]: I1211 10:37:25.628707 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/must-gather-p6r8v" event={"ID":"402d07ff-50bf-4015-b80f-46a55aeac97c","Type":"ContainerStarted","Data":"9f09cf1fc579d578939220bd2a5423b0928bf3fb66f23a93cd5c5dcd49a99a1e"} Dec 11 10:37:25 crc kubenswrapper[4788]: I1211 10:37:25.629450 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/must-gather-p6r8v" event={"ID":"402d07ff-50bf-4015-b80f-46a55aeac97c","Type":"ContainerStarted","Data":"d645d9ebed8f88ebef41ce8df67711343adefd1e16044883e03d030ef2c7bd0c"} Dec 11 10:37:25 crc kubenswrapper[4788]: I1211 10:37:25.629470 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/must-gather-p6r8v" event={"ID":"402d07ff-50bf-4015-b80f-46a55aeac97c","Type":"ContainerStarted","Data":"61f4e5880293dde7228744dcfb40e3f9e71fd135761d4bbf955aeed7e7b94a48"} Dec 11 10:37:25 crc kubenswrapper[4788]: I1211 10:37:25.648469 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-74tc8/must-gather-p6r8v" podStartSLOduration=1.648443683 podStartE2EDuration="1.648443683s" podCreationTimestamp="2025-12-11 10:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:37:25.642592905 +0000 UTC m=+4575.713372501" watchObservedRunningTime="2025-12-11 10:37:25.648443683 +0000 UTC m=+4575.719223269" Dec 11 10:37:28 crc kubenswrapper[4788]: E1211 10:37:28.396502 4788 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.238:36560->38.129.56.238:42697: write tcp 38.129.56.238:36560->38.129.56.238:42697: write: broken pipe Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.147704 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qnkv6"] Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.149616 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.154498 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-74tc8"/"default-dockercfg-w8tfs" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.272960 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxnfw\" (UniqueName: \"kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.273152 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.375278 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxnfw\" (UniqueName: \"kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.375456 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.375670 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.394670 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxnfw\" (UniqueName: \"kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw\") pod \"crc-debug-qnkv6\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.481670 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:37:29 crc kubenswrapper[4788]: W1211 10:37:29.528000 4788 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3b96be5_eae3_4d5b_9753_c4f5b3d61bca.slice/crio-6f7a6db1a4d3f264f1f7aa179b050d82f3b3b345ad08a27dad3d65c2cfdfd2a9 WatchSource:0}: Error finding container 6f7a6db1a4d3f264f1f7aa179b050d82f3b3b345ad08a27dad3d65c2cfdfd2a9: Status 404 returned error can't find the container with id 6f7a6db1a4d3f264f1f7aa179b050d82f3b3b345ad08a27dad3d65c2cfdfd2a9 Dec 11 10:37:29 crc kubenswrapper[4788]: I1211 10:37:29.675751 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" event={"ID":"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca","Type":"ContainerStarted","Data":"6f7a6db1a4d3f264f1f7aa179b050d82f3b3b345ad08a27dad3d65c2cfdfd2a9"} Dec 11 10:37:30 crc kubenswrapper[4788]: I1211 10:37:30.686125 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" event={"ID":"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca","Type":"ContainerStarted","Data":"21ab95ac37fc52ee42bce14372dbf8e0f02bc927c87b7a594fda4a1e20cbe045"} Dec 11 10:37:30 crc kubenswrapper[4788]: I1211 10:37:30.705935 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" podStartSLOduration=1.705908269 podStartE2EDuration="1.705908269s" podCreationTimestamp="2025-12-11 10:37:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:37:30.699902237 +0000 UTC m=+4580.770681823" watchObservedRunningTime="2025-12-11 10:37:30.705908269 +0000 UTC m=+4580.776687855" Dec 11 10:37:37 crc kubenswrapper[4788]: I1211 10:37:37.496305 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:37:37 crc kubenswrapper[4788]: E1211 10:37:37.497159 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:37:49 crc kubenswrapper[4788]: I1211 10:37:49.496475 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:37:49 crc kubenswrapper[4788]: E1211 10:37:49.497336 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:38:02 crc kubenswrapper[4788]: I1211 10:38:02.496683 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:38:02 crc kubenswrapper[4788]: E1211 10:38:02.497526 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:38:04 crc kubenswrapper[4788]: I1211 10:38:04.012175 4788 generic.go:334] "Generic (PLEG): container finished" podID="f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" containerID="21ab95ac37fc52ee42bce14372dbf8e0f02bc927c87b7a594fda4a1e20cbe045" exitCode=0 Dec 11 10:38:04 crc kubenswrapper[4788]: I1211 10:38:04.012275 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" event={"ID":"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca","Type":"ContainerDied","Data":"21ab95ac37fc52ee42bce14372dbf8e0f02bc927c87b7a594fda4a1e20cbe045"} Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.119163 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.151563 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qnkv6"] Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.162100 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qnkv6"] Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.270843 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host\") pod \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.270972 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host" (OuterVolumeSpecName: "host") pod "f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" (UID: "f3b96be5-eae3-4d5b-9753-c4f5b3d61bca"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.271075 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxnfw\" (UniqueName: \"kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw\") pod \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\" (UID: \"f3b96be5-eae3-4d5b-9753-c4f5b3d61bca\") " Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.271576 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.278341 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw" (OuterVolumeSpecName: "kube-api-access-sxnfw") pod "f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" (UID: "f3b96be5-eae3-4d5b-9753-c4f5b3d61bca"). InnerVolumeSpecName "kube-api-access-sxnfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:38:05 crc kubenswrapper[4788]: I1211 10:38:05.373208 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxnfw\" (UniqueName: \"kubernetes.io/projected/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca-kube-api-access-sxnfw\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.032599 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f7a6db1a4d3f264f1f7aa179b050d82f3b3b345ad08a27dad3d65c2cfdfd2a9" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.032657 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qnkv6" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.324842 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-74tc8/crc-debug-vmwgr"] Dec 11 10:38:06 crc kubenswrapper[4788]: E1211 10:38:06.325292 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" containerName="container-00" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.325306 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" containerName="container-00" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.325507 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" containerName="container-00" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.326178 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.328686 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-74tc8"/"default-dockercfg-w8tfs" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.495670 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrwnt\" (UniqueName: \"kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.495977 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.505746 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3b96be5-eae3-4d5b-9753-c4f5b3d61bca" path="/var/lib/kubelet/pods/f3b96be5-eae3-4d5b-9753-c4f5b3d61bca/volumes" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.597992 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrwnt\" (UniqueName: \"kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.598332 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.598471 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.623119 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrwnt\" (UniqueName: \"kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt\") pod \"crc-debug-vmwgr\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:06 crc kubenswrapper[4788]: I1211 10:38:06.645293 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:07 crc kubenswrapper[4788]: I1211 10:38:07.043246 4788 generic.go:334] "Generic (PLEG): container finished" podID="85db9e36-0165-4dd8-bea6-7bc0fb9201c6" containerID="fbfb7568971ce57e58b974dabbceffa94556b5c28e0131047923ed0610ee6de0" exitCode=0 Dec 11 10:38:07 crc kubenswrapper[4788]: I1211 10:38:07.043340 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" event={"ID":"85db9e36-0165-4dd8-bea6-7bc0fb9201c6","Type":"ContainerDied","Data":"fbfb7568971ce57e58b974dabbceffa94556b5c28e0131047923ed0610ee6de0"} Dec 11 10:38:07 crc kubenswrapper[4788]: I1211 10:38:07.043524 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" event={"ID":"85db9e36-0165-4dd8-bea6-7bc0fb9201c6","Type":"ContainerStarted","Data":"26bcfa1c0361c019ebf394eca27fb44c1ed366087302ad5f57c789d8a371f6ed"} Dec 11 10:38:07 crc kubenswrapper[4788]: I1211 10:38:07.476597 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-vmwgr"] Dec 11 10:38:07 crc kubenswrapper[4788]: I1211 10:38:07.488837 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-vmwgr"] Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.148753 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.330253 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrwnt\" (UniqueName: \"kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt\") pod \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.330723 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host\") pod \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\" (UID: \"85db9e36-0165-4dd8-bea6-7bc0fb9201c6\") " Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.330986 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host" (OuterVolumeSpecName: "host") pod "85db9e36-0165-4dd8-bea6-7bc0fb9201c6" (UID: "85db9e36-0165-4dd8-bea6-7bc0fb9201c6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.331617 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.346508 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt" (OuterVolumeSpecName: "kube-api-access-xrwnt") pod "85db9e36-0165-4dd8-bea6-7bc0fb9201c6" (UID: "85db9e36-0165-4dd8-bea6-7bc0fb9201c6"). InnerVolumeSpecName "kube-api-access-xrwnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.433134 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrwnt\" (UniqueName: \"kubernetes.io/projected/85db9e36-0165-4dd8-bea6-7bc0fb9201c6-kube-api-access-xrwnt\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.507396 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85db9e36-0165-4dd8-bea6-7bc0fb9201c6" path="/var/lib/kubelet/pods/85db9e36-0165-4dd8-bea6-7bc0fb9201c6/volumes" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.776582 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qwhqq"] Dec 11 10:38:08 crc kubenswrapper[4788]: E1211 10:38:08.777276 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85db9e36-0165-4dd8-bea6-7bc0fb9201c6" containerName="container-00" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.777484 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="85db9e36-0165-4dd8-bea6-7bc0fb9201c6" containerName="container-00" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.777771 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="85db9e36-0165-4dd8-bea6-7bc0fb9201c6" containerName="container-00" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.778484 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.944800 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpbmc\" (UniqueName: \"kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:08 crc kubenswrapper[4788]: I1211 10:38:08.944905 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.046652 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpbmc\" (UniqueName: \"kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.047043 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.047293 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.067604 4788 scope.go:117] "RemoveContainer" containerID="fbfb7568971ce57e58b974dabbceffa94556b5c28e0131047923ed0610ee6de0" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.067622 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-vmwgr" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.071410 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpbmc\" (UniqueName: \"kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc\") pod \"crc-debug-qwhqq\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:09 crc kubenswrapper[4788]: I1211 10:38:09.100416 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:10 crc kubenswrapper[4788]: I1211 10:38:10.077388 4788 generic.go:334] "Generic (PLEG): container finished" podID="1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" containerID="29a1465ac03527733719f72dabd52c8c89f90d4b025f23613d09e7b8cbd43c7a" exitCode=0 Dec 11 10:38:10 crc kubenswrapper[4788]: I1211 10:38:10.077922 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" event={"ID":"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf","Type":"ContainerDied","Data":"29a1465ac03527733719f72dabd52c8c89f90d4b025f23613d09e7b8cbd43c7a"} Dec 11 10:38:10 crc kubenswrapper[4788]: I1211 10:38:10.077951 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" event={"ID":"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf","Type":"ContainerStarted","Data":"9affbafd27b47b29610797d74065a52444423e123b9a829b669df86cd3bc1332"} Dec 11 10:38:10 crc kubenswrapper[4788]: I1211 10:38:10.132514 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qwhqq"] Dec 11 10:38:10 crc kubenswrapper[4788]: I1211 10:38:10.141641 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-74tc8/crc-debug-qwhqq"] Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.198705 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.290680 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpbmc\" (UniqueName: \"kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc\") pod \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.290953 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host\") pod \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\" (UID: \"1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf\") " Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.291130 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host" (OuterVolumeSpecName: "host") pod "1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" (UID: "1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.291837 4788 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-host\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.296312 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc" (OuterVolumeSpecName: "kube-api-access-jpbmc") pod "1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" (UID: "1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf"). InnerVolumeSpecName "kube-api-access-jpbmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:38:11 crc kubenswrapper[4788]: I1211 10:38:11.394702 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpbmc\" (UniqueName: \"kubernetes.io/projected/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf-kube-api-access-jpbmc\") on node \"crc\" DevicePath \"\"" Dec 11 10:38:12 crc kubenswrapper[4788]: I1211 10:38:12.103332 4788 scope.go:117] "RemoveContainer" containerID="29a1465ac03527733719f72dabd52c8c89f90d4b025f23613d09e7b8cbd43c7a" Dec 11 10:38:12 crc kubenswrapper[4788]: I1211 10:38:12.103459 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-74tc8/crc-debug-qwhqq" Dec 11 10:38:12 crc kubenswrapper[4788]: I1211 10:38:12.519338 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" path="/var/lib/kubelet/pods/1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf/volumes" Dec 11 10:38:13 crc kubenswrapper[4788]: I1211 10:38:13.496550 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:38:13 crc kubenswrapper[4788]: E1211 10:38:13.496862 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:38:26 crc kubenswrapper[4788]: I1211 10:38:26.495988 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:38:26 crc kubenswrapper[4788]: E1211 10:38:26.496897 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:38:37 crc kubenswrapper[4788]: I1211 10:38:37.950470 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65f8fcd946-pfr68_f4638f63-07df-47e5-942d-3061f2162f08/barbican-api/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.123020 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-65f8fcd946-pfr68_f4638f63-07df-47e5-942d-3061f2162f08/barbican-api-log/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.145877 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fd98b95d6-wxbsw_5e2739c9-c97e-4807-bde7-172073652810/barbican-keystone-listener/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.229126 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fd98b95d6-wxbsw_5e2739c9-c97e-4807-bde7-172073652810/barbican-keystone-listener-log/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.318169 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65644684d5-hmnmb_3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7/barbican-worker/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 
10:38:38.392130 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-65644684d5-hmnmb_3f0c7a5f-90fb-47e4-86b8-f4e071edc3f7/barbican-worker-log/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.564514 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vvn2g_f6a40a8b-5427-40c5-b48b-18df0deb1e39/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:38 crc kubenswrapper[4788]: I1211 10:38:38.630567 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/ceilometer-central-agent/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.193856 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/sg-core/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.198570 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/proxy-httpd/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.208337 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7e44e4da-495b-45b4-bfef-27ccb5dd3a51/ceilometer-notification-agent/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.452409 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_300a8660-46c4-426b-b4a2-3f713fe639b2/cinder-api-log/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.484934 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_300a8660-46c4-426b-b4a2-3f713fe639b2/cinder-api/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.507686 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b29f4b83-e1ef-49cf-82eb-e7f080c7b28b/cinder-scheduler/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.660286 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b29f4b83-e1ef-49cf-82eb-e7f080c7b28b/probe/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.729636 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-ztjhk_1203ac2a-acfa-4b1a-bba7-97eff5508d35/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.907957 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-hkjrg_7eb3182b-8f0a-4c94-b59b-4d631cd5f52d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:39 crc kubenswrapper[4788]: I1211 10:38:39.922991 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/init/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.100041 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/init/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.172426 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-ppx2d_8cceb164-ffe1-4a11-83e9-888f72ad58f0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.173390 4788 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_dnsmasq-dns-78c64bc9c5-7bm7g_f620c56e-f069-4bd1-9dc9-092bf9beaf91/dnsmasq-dns/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.328145 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_621ba590-fb77-4a71-a559-62c75a7f15dc/glance-httpd/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.335706 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_621ba590-fb77-4a71-a559-62c75a7f15dc/glance-log/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.524425 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_67b7dab2-3849-4249-99b5-63547063f12b/glance-httpd/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.532218 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_67b7dab2-3849-4249-99b5-63547063f12b/glance-log/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.893888 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-84d5c869dd-hzg6f_3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251/horizon/0.log" Dec 11 10:38:40 crc kubenswrapper[4788]: I1211 10:38:40.960386 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-cclp8_ab20916c-42ba-431e-af33-cf55f453378e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.204437 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-fkd9r_fdb8e800-98d6-4c2a-be3a-773e70a3dbff/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.439952 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-84d5c869dd-hzg6f_3ab8e5b5-ea4e-46e1-bc44-5f66e9a8d251/horizon-log/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.459439 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29424121-g2d82_f1b95b41-dc96-4424-84a4-3c647a46ef4e/keystone-cron/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.474156 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c99b79967-dmp47_97f22e9d-3a9b-420e-a97d-0421c447bebb/keystone-api/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.495682 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:38:41 crc kubenswrapper[4788]: E1211 10:38:41.496042 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.662241 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_6679f5b3-4784-41d8-8475-fc65b77bb7c7/kube-state-metrics/0.log" Dec 11 10:38:41 crc kubenswrapper[4788]: I1211 10:38:41.672661 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-pj4c8_20db1208-f411-4f0b-87da-e10fc9a8c4f9/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:42 crc kubenswrapper[4788]: I1211 10:38:42.007291 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-564d966fb9-4l95x_ac53e60f-bd33-417c-b606-cbe350b6597a/neutron-httpd/0.log" Dec 11 10:38:42 crc kubenswrapper[4788]: I1211 10:38:42.039884 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-564d966fb9-4l95x_ac53e60f-bd33-417c-b606-cbe350b6597a/neutron-api/0.log" Dec 11 10:38:42 crc kubenswrapper[4788]: I1211 10:38:42.179480 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-2ptsl_625e8b87-3138-45b9-935d-d26e22240053/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:42 crc kubenswrapper[4788]: I1211 10:38:42.703782 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0d35e32a-7f87-4a69-9233-7d8bb40fec75/nova-api-log/0.log" Dec 11 10:38:42 crc kubenswrapper[4788]: I1211 10:38:42.774466 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_e0dba180-1bb0-4596-be23-66721a174129/nova-cell0-conductor-conductor/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.138765 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d22ef65f-e312-488f-8607-514c224411e8/nova-cell1-conductor-conductor/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.179281 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a1fd7161-dba8-481a-946b-07baf45ffcdf/nova-cell1-novncproxy-novncproxy/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.208174 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0d35e32a-7f87-4a69-9233-7d8bb40fec75/nova-api-api/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.436276 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-5mm6f_2d2d45a1-244d-4e91-be2b-db12eb484a25/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.588516 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d255cc24-14bd-4114-938b-c91acbe5c5d2/nova-metadata-log/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.935860 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b97d64be-b549-4abc-92a4-be155a300b1a/nova-scheduler-scheduler/0.log" Dec 11 10:38:43 crc kubenswrapper[4788]: I1211 10:38:43.963465 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/mysql-bootstrap/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.245122 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/mysql-bootstrap/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.267684 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f98e1b46-07d0-44d9-810c-4a778d44837d/galera/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.468505 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/mysql-bootstrap/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.665799 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/mysql-bootstrap/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.719130 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_2552ec58-e76a-4c17-ab79-ac237c6d972c/galera/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.892305 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_4bae27c1-f73d-4bdb-91a2-185dd601bc33/openstackclient/0.log" Dec 11 10:38:44 crc kubenswrapper[4788]: I1211 10:38:44.993151 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-5hgrc_b899f552-09d4-4919-a3f1-79ff044cd435/ovn-controller/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.155644 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-gpspm_f000044f-b0a7-417e-8278-5deb090b8105/openstack-network-exporter/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.422721 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server-init/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.432545 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d255cc24-14bd-4114-938b-c91acbe5c5d2/nova-metadata-metadata/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.577954 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovs-vswitchd/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.647494 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server-init/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.723198 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7474g_ffcb62ce-c938-498b-9026-8fe40512245a/ovsdb-server/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.893818 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-wh8qs_6de52d95-9e8d-402b-9fd3-3431a58a61e8/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.950465 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb/ovn-northd/0.log" Dec 11 10:38:45 crc kubenswrapper[4788]: I1211 10:38:45.969702 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b2ff5a2a-3f5b-4727-be60-7d69ed12b1bb/openstack-network-exporter/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.170218 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3858a264-6dc8-4a58-8e80-3d57649da896/openstack-network-exporter/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.239134 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3858a264-6dc8-4a58-8e80-3d57649da896/ovsdbserver-nb/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.370641 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_428e76c9-65ed-434c-a25d-6bcd956b48d5/openstack-network-exporter/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.392760 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_428e76c9-65ed-434c-a25d-6bcd956b48d5/ovsdbserver-sb/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.600200 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-564d895c68-qq26m_6e6612ef-ed63-43eb-a29c-a4dee4798be8/placement-api/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.688010 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-564d895c68-qq26m_6e6612ef-ed63-43eb-a29c-a4dee4798be8/placement-log/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.773027 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/setup-container/0.log" Dec 11 10:38:46 crc kubenswrapper[4788]: I1211 10:38:46.977659 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/setup-container/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.031210 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_866f442b-155f-40fb-836d-3cc2add24e36/rabbitmq/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.047484 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/setup-container/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.260780 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/setup-container/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.276442 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2e0d2c67-915a-4461-ab83-75e349c18950/rabbitmq/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.321280 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ksrxm_ecf372fc-dfed-418f-904f-8a2e485acbcd/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.521730 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-pw4mb_888d4b4c-ac58-4ac8-8c53-fa41a750aaef/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.542037 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-phwpb_3b286cdf-f5f9-4ad2-ab37-0e4697309be5/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.790385 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-qwvh4_5459ff9c-bd04-47d1-ade7-e52983b8fc86/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:47 crc kubenswrapper[4788]: I1211 10:38:47.898638 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-w29p9_bb7a8155-00dc-4dc1-9fc3-92417a6264ff/ssh-known-hosts-edpm-deployment/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.157027 4788 scope.go:117] "RemoveContainer" 
containerID="6c5408b3fcae49c9b6311170c54a181cbd80bca9b617b2e4576c5f2c47881eea" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.200304 4788 scope.go:117] "RemoveContainer" containerID="7d4201392dde9a241786a24048576a788cd51392141119a696b504af62ccc32a" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.200799 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-d6d65bd59-g294c_c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5/proxy-server/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.241207 4788 scope.go:117] "RemoveContainer" containerID="73f6992db0798fb0d112eb902332a47e5f612a7a2cbb4f085dd1b3b344c74f40" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.290074 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-d6d65bd59-g294c_c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5/proxy-httpd/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.303937 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-m5bdb_6f5299be-0ac1-4048-b2aa-6a07ce5c30e1/swift-ring-rebalance/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.455797 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-auditor/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.517791 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-reaper/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.597011 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-replicator/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.714518 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-auditor/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.751878 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/account-server/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.811096 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-server/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.969568 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-replicator/0.log" Dec 11 10:38:48 crc kubenswrapper[4788]: I1211 10:38:48.991160 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/container-updater/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.050751 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-auditor/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.087772 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-expirer/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.231708 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-server/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.263869 4788 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-updater/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.308928 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/rsync/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.313397 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/object-replicator/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.493104 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_874f552a-7856-439c-937c-a87d9c15305c/swift-recon-cron/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.594378 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-jjlt7_5395fbbe-5f31-4c60-bee6-09b492d13e36/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.743351 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5cf588e9-b8ff-4862-9a71-8e6805d89b2a/tempest-tests-tempest-tests-runner/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.886687 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_c7921196-c38d-467e-b733-20416c265305/test-operator-logs-container/0.log" Dec 11 10:38:49 crc kubenswrapper[4788]: I1211 10:38:49.969074 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-g8htq_f49b48e9-71fc-4a17-9cf2-c5831649bda2/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 11 10:38:54 crc kubenswrapper[4788]: I1211 10:38:54.496796 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:38:54 crc kubenswrapper[4788]: E1211 10:38:54.497497 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:39:01 crc kubenswrapper[4788]: I1211 10:39:01.922625 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_eba9b8bc-1fe1-4ba9-9521-a21c25bed6be/memcached/0.log" Dec 11 10:39:06 crc kubenswrapper[4788]: I1211 10:39:06.495957 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:39:06 crc kubenswrapper[4788]: E1211 10:39:06.496761 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" Dec 11 10:39:17 crc kubenswrapper[4788]: I1211 10:39:17.713187 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-nnspt_03f510d3-616e-454c-9086-687604b0cff1/kube-rbac-proxy/0.log" Dec 11 10:39:17 crc kubenswrapper[4788]: I1211 10:39:17.738315 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-nnspt_03f510d3-616e-454c-9086-687604b0cff1/manager/0.log" Dec 11 10:39:17 crc kubenswrapper[4788]: I1211 10:39:17.907795 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-qr4bz_f2525214-ff81-4638-baa5-afcd178f9ec6/kube-rbac-proxy/0.log" Dec 11 10:39:17 crc kubenswrapper[4788]: I1211 10:39:17.975022 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-qr4bz_f2525214-ff81-4638-baa5-afcd178f9ec6/manager/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.094549 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-688sb_cb780059-66e2-48f4-913b-271489226ef9/kube-rbac-proxy/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.128179 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-688sb_cb780059-66e2-48f4-913b-271489226ef9/manager/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.130577 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.423718 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.429863 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.453712 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.617864 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/pull/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.649020 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/util/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.663691 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e159d2649ce989beb074d805325a5a121c094dfa7306b88b575cdafa02j24dg_d16150ce-c1df-425d-b361-f6aba1dba525/extract/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.832485 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-kdjld_684c9c50-d818-41d8-852d-82f5937c18ab/kube-rbac-proxy/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: 
I1211 10:39:18.904134 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-kdjld_684c9c50-d818-41d8-852d-82f5937c18ab/manager/0.log" Dec 11 10:39:18 crc kubenswrapper[4788]: I1211 10:39:18.946079 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-zr4vh_0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795/kube-rbac-proxy/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.105297 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-zr4vh_0fb6d7a6-5ef9-4c32-a4b6-bcdc65102795/manager/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.166640 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-p9v6z_348b99db-d5ca-41e2-b2a0-f22f6aeca6b0/kube-rbac-proxy/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.228194 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-p9v6z_348b99db-d5ca-41e2-b2a0-f22f6aeca6b0/manager/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.349982 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-b7lzd_2868074d-eb62-4d8a-b275-047d72fec830/kube-rbac-proxy/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.588526 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-vzs5b_7707f72a-2719-46de-8409-b8d397a4ce03/kube-rbac-proxy/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.610098 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-vzs5b_7707f72a-2719-46de-8409-b8d397a4ce03/manager/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.625124 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-b7lzd_2868074d-eb62-4d8a-b275-047d72fec830/manager/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.782178 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mpztw_bf947be7-c3ef-4ae6-beff-11d5ae6d1f94/kube-rbac-proxy/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.870612 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-mpztw_bf947be7-c3ef-4ae6-beff-11d5ae6d1f94/manager/0.log" Dec 11 10:39:19 crc kubenswrapper[4788]: I1211 10:39:19.999206 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-lcx6f_354b87a3-d193-427f-8620-f7fcb52acb67/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.000429 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-lcx6f_354b87a3-d193-427f-8620-f7fcb52acb67/manager/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.068200 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-dmnk5_d78d8bad-e298-41b5-82fa-d4cf464d28dd/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc 
kubenswrapper[4788]: I1211 10:39:20.188204 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-dmnk5_d78d8bad-e298-41b5-82fa-d4cf464d28dd/manager/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.296219 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hlfq2_f7a4db33-474d-496e-b745-939ce842904d/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.363852 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-hlfq2_f7a4db33-474d-496e-b745-939ce842904d/manager/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.454528 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pkxtw_5fa7cb98-c29a-4efb-81ff-710523478ec0/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.575658 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pkxtw_5fa7cb98-c29a-4efb-81ff-710523478ec0/manager/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.644006 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-brq4g_556f2b13-91d6-4261-9e7a-bed452e436eb/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.680269 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-brq4g_556f2b13-91d6-4261-9e7a-bed452e436eb/manager/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.831900 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879ff2lw9_6491a596-c46a-45c7-9430-4d9f6a40a6d2/kube-rbac-proxy/0.log" Dec 11 10:39:20 crc kubenswrapper[4788]: I1211 10:39:20.881181 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879ff2lw9_6491a596-c46a-45c7-9430-4d9f6a40a6d2/manager/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.244690 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7dcd4874fd-59fm4_3ad7c2ee-56fc-41ad-9f63-2697aa291948/operator/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.437678 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8g2bh_78fdcbf0-ff20-43b8-bf9a-ded097de063c/registry-server/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.495596 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.498263 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zqgjl_c0d61f31-e8b5-454d-8961-cedc33a2efa2/kube-rbac-proxy/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.741075 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-zqgjl_c0d61f31-e8b5-454d-8961-cedc33a2efa2/manager/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.769852 4788 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-gz22l_f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d/kube-rbac-proxy/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.790984 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-gz22l_f2b219e7-4ad9-4b16-96cc-20a6f7c33c4d/manager/0.log" Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.806339 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded"} Dec 11 10:39:21 crc kubenswrapper[4788]: I1211 10:39:21.988450 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-gbfnb_67082483-5ed3-4141-a7be-d3f95f5b07c4/operator/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.183128 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-v87tj_7e94e3ad-b1bf-44e3-aa17-52380cb0e651/manager/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.184006 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-v87tj_7e94e3ad-b1bf-44e3-aa17-52380cb0e651/kube-rbac-proxy/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.208832 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-65d64bdc49-5hs5g_f8ee8e1d-0519-4464-9ca1-17d37770dcdc/manager/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.395210 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:22 crc kubenswrapper[4788]: E1211 10:39:22.395758 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" containerName="container-00" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.395795 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" containerName="container-00" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.396033 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="1304c4c0-0a87-4a3c-8fa2-0bce7f6c13bf" containerName="container-00" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.397592 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.414963 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.452540 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-sxb22_f6e78ec3-ccc1-48b2-8ba6-962de2a25249/kube-rbac-proxy/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.544997 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-sxb22_f6e78ec3-ccc1-48b2-8ba6-962de2a25249/manager/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.587083 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.587282 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqmbm\" (UniqueName: \"kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.587337 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.618292 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-z7q4w_726d9db1-d370-4bea-b91d-6beff7ba4b6b/kube-rbac-proxy/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.647074 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-z7q4w_726d9db1-d370-4bea-b91d-6beff7ba4b6b/manager/0.log" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.690467 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqmbm\" (UniqueName: \"kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.690573 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.690737 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content\") pod 
\"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.691392 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:22 crc kubenswrapper[4788]: I1211 10:39:22.691438 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:23 crc kubenswrapper[4788]: I1211 10:39:23.258611 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqmbm\" (UniqueName: \"kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm\") pod \"redhat-operators-54v29\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:23 crc kubenswrapper[4788]: I1211 10:39:23.325993 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:23 crc kubenswrapper[4788]: I1211 10:39:23.513965 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-kggr7_ce133f4a-b1fd-4e51-8e4f-390d6f125e1d/kube-rbac-proxy/0.log" Dec 11 10:39:23 crc kubenswrapper[4788]: I1211 10:39:23.518146 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-kggr7_ce133f4a-b1fd-4e51-8e4f-390d6f125e1d/manager/0.log" Dec 11 10:39:23 crc kubenswrapper[4788]: I1211 10:39:23.833480 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:24 crc kubenswrapper[4788]: I1211 10:39:24.836451 4788 generic.go:334] "Generic (PLEG): container finished" podID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerID="3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54" exitCode=0 Dec 11 10:39:24 crc kubenswrapper[4788]: I1211 10:39:24.836561 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerDied","Data":"3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54"} Dec 11 10:39:24 crc kubenswrapper[4788]: I1211 10:39:24.836941 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerStarted","Data":"694e7280bcd54a87f51c9f2306370907130c41fda62ddfeabb8520f7907c2b92"} Dec 11 10:39:24 crc kubenswrapper[4788]: I1211 10:39:24.839218 4788 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 10:39:25 crc kubenswrapper[4788]: I1211 10:39:25.847323 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerStarted","Data":"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6"} Dec 11 10:39:26 crc 
kubenswrapper[4788]: I1211 10:39:26.859525 4788 generic.go:334] "Generic (PLEG): container finished" podID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerID="31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6" exitCode=0 Dec 11 10:39:26 crc kubenswrapper[4788]: I1211 10:39:26.860505 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerDied","Data":"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6"} Dec 11 10:39:27 crc kubenswrapper[4788]: I1211 10:39:27.871212 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerStarted","Data":"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73"} Dec 11 10:39:27 crc kubenswrapper[4788]: I1211 10:39:27.895963 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-54v29" podStartSLOduration=3.117318524 podStartE2EDuration="5.895938023s" podCreationTimestamp="2025-12-11 10:39:22 +0000 UTC" firstStartedPulling="2025-12-11 10:39:24.838942021 +0000 UTC m=+4694.909721607" lastFinishedPulling="2025-12-11 10:39:27.61756152 +0000 UTC m=+4697.688341106" observedRunningTime="2025-12-11 10:39:27.888555396 +0000 UTC m=+4697.959334982" watchObservedRunningTime="2025-12-11 10:39:27.895938023 +0000 UTC m=+4697.966717609" Dec 11 10:39:33 crc kubenswrapper[4788]: I1211 10:39:33.326610 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:33 crc kubenswrapper[4788]: I1211 10:39:33.327312 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:34 crc kubenswrapper[4788]: I1211 10:39:34.381489 4788 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-54v29" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="registry-server" probeResult="failure" output=< Dec 11 10:39:34 crc kubenswrapper[4788]: timeout: failed to connect service ":50051" within 1s Dec 11 10:39:34 crc kubenswrapper[4788]: > Dec 11 10:39:43 crc kubenswrapper[4788]: I1211 10:39:43.378704 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:43 crc kubenswrapper[4788]: I1211 10:39:43.427726 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:43 crc kubenswrapper[4788]: I1211 10:39:43.627760 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:43 crc kubenswrapper[4788]: I1211 10:39:43.992017 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5ztq4_b39d101c-fbdd-427c-9369-cbfde9bb50cd/control-plane-machine-set-operator/0.log" Dec 11 10:39:44 crc kubenswrapper[4788]: I1211 10:39:44.178015 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-47nzd_0783fcec-bf1b-4910-b8c2-08d85c53093a/kube-rbac-proxy/0.log" Dec 11 10:39:44 crc kubenswrapper[4788]: I1211 10:39:44.207897 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-47nzd_0783fcec-bf1b-4910-b8c2-08d85c53093a/machine-api-operator/0.log" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.038534 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-54v29" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="registry-server" containerID="cri-o://dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73" gracePeriod=2 Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.585898 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.765160 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqmbm\" (UniqueName: \"kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm\") pod \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.765391 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content\") pod \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.765484 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities\") pod \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\" (UID: \"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d\") " Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.766215 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities" (OuterVolumeSpecName: "utilities") pod "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" (UID: "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.780592 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm" (OuterVolumeSpecName: "kube-api-access-vqmbm") pod "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" (UID: "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d"). InnerVolumeSpecName "kube-api-access-vqmbm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.867637 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.867685 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqmbm\" (UniqueName: \"kubernetes.io/projected/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-kube-api-access-vqmbm\") on node \"crc\" DevicePath \"\"" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.877376 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" (UID: "4849cffe-25c8-4e6a-af9d-292bf4d1ed5d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:39:45 crc kubenswrapper[4788]: I1211 10:39:45.969806 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.050653 4788 generic.go:334] "Generic (PLEG): container finished" podID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerID="dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73" exitCode=0 Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.050712 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerDied","Data":"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73"} Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.050737 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-54v29" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.050755 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-54v29" event={"ID":"4849cffe-25c8-4e6a-af9d-292bf4d1ed5d","Type":"ContainerDied","Data":"694e7280bcd54a87f51c9f2306370907130c41fda62ddfeabb8520f7907c2b92"} Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.050778 4788 scope.go:117] "RemoveContainer" containerID="dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.076683 4788 scope.go:117] "RemoveContainer" containerID="31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.087846 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.096747 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-54v29"] Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.506691 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" path="/var/lib/kubelet/pods/4849cffe-25c8-4e6a-af9d-292bf4d1ed5d/volumes" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.582463 4788 scope.go:117] "RemoveContainer" containerID="3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.622495 4788 scope.go:117] "RemoveContainer" containerID="dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73" Dec 11 10:39:46 crc kubenswrapper[4788]: E1211 10:39:46.623138 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73\": container with ID starting with dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73 not found: ID does not exist" containerID="dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.623172 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73"} err="failed to get container status \"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73\": rpc error: code = NotFound desc = could not find container \"dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73\": container with ID starting with dedeb042e8b446c8b3fbd4cfa0da24c6957a261e6842fc816ac8ed9969281e73 not found: ID does not exist" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.623196 4788 scope.go:117] "RemoveContainer" containerID="31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6" Dec 11 10:39:46 crc kubenswrapper[4788]: E1211 10:39:46.623553 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6\": container with ID starting with 31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6 not found: ID does not exist" containerID="31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.623582 4788 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6"} err="failed to get container status \"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6\": rpc error: code = NotFound desc = could not find container \"31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6\": container with ID starting with 31c3e098b25568834662e2582357a599898840bb4c5363d71019816ad019e7e6 not found: ID does not exist" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.623599 4788 scope.go:117] "RemoveContainer" containerID="3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54" Dec 11 10:39:46 crc kubenswrapper[4788]: E1211 10:39:46.623899 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54\": container with ID starting with 3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54 not found: ID does not exist" containerID="3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54" Dec 11 10:39:46 crc kubenswrapper[4788]: I1211 10:39:46.623929 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54"} err="failed to get container status \"3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54\": rpc error: code = NotFound desc = could not find container \"3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54\": container with ID starting with 3f48f178a9af8d98bd450df653b852f0a3ac0835db57ab9493f7aa512d82cd54 not found: ID does not exist" Dec 11 10:39:57 crc kubenswrapper[4788]: I1211 10:39:57.528262 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hq8gs_c6917182-a497-4802-8747-4a6c3e78a11f/cert-manager-controller/0.log" Dec 11 10:39:57 crc kubenswrapper[4788]: I1211 10:39:57.654784 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-t6sfj_0493623c-4b7f-4f28-a1bb-9303b031d9a0/cert-manager-cainjector/0.log" Dec 11 10:39:57 crc kubenswrapper[4788]: I1211 10:39:57.731287 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-697m4_dea5ca68-6922-46e4-81ed-8c917c670214/cert-manager-webhook/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.094499 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-z27wt_06c736a4-1288-473a-bceb-0951aced851f/nmstate-console-plugin/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.288777 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-b6mlg_88b842a1-c94f-4a0a-b845-d5330f12a0a1/nmstate-handler/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.333711 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fprr4_49a187f1-90cf-4afb-9dec-10bf57b8ff69/kube-rbac-proxy/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.350398 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fprr4_49a187f1-90cf-4afb-9dec-10bf57b8ff69/nmstate-metrics/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.517869 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-8dw7z_0f1f9ffb-58ec-4282-b3a1-d9040b09023b/nmstate-operator/0.log" Dec 11 10:40:11 crc kubenswrapper[4788]: I1211 10:40:11.555844 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-4vbrf_5baf07b9-7c94-4c71-99ee-37b2e68d0437/nmstate-webhook/0.log" Dec 11 10:40:25 crc kubenswrapper[4788]: I1211 10:40:25.434752 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-82svd_ac39e555-61a7-48d2-976e-e3a095bae216/kube-rbac-proxy/0.log" Dec 11 10:40:25 crc kubenswrapper[4788]: I1211 10:40:25.625046 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-82svd_ac39e555-61a7-48d2-976e-e3a095bae216/controller/0.log" Dec 11 10:40:25 crc kubenswrapper[4788]: I1211 10:40:25.697953 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.622204 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.650199 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.675808 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.703530 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.876944 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.880794 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.900895 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:40:26 crc kubenswrapper[4788]: I1211 10:40:26.923537 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.079530 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-reloader/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.082661 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-frr-files/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.138354 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/cp-metrics/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.180177 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/controller/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.283503 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/frr-metrics/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.362332 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/kube-rbac-proxy/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.375715 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/kube-rbac-proxy-frr/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.544690 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/reloader/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.678216 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-fc5pw_1ed60fe2-ac5e-46b3-a0f1-05436db532bb/frr-k8s-webhook-server/0.log" Dec 11 10:40:27 crc kubenswrapper[4788]: I1211 10:40:27.909860 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64ccd66d65-8ldsp_d17f23e5-47b0-4b87-ab5c-32ac870eb738/manager/0.log" Dec 11 10:40:28 crc kubenswrapper[4788]: I1211 10:40:28.041074 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-57bc7764b6-mf4wf_2fec7bb6-d596-4da7-94d0-567cb78c94b2/webhook-server/0.log" Dec 11 10:40:28 crc kubenswrapper[4788]: I1211 10:40:28.220189 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wfdvw_9edef12f-0a1f-45ad-8850-0d2edfc5384c/kube-rbac-proxy/0.log" Dec 11 10:40:28 crc kubenswrapper[4788]: I1211 10:40:28.740574 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wfdvw_9edef12f-0a1f-45ad-8850-0d2edfc5384c/speaker/0.log" Dec 11 10:40:28 crc kubenswrapper[4788]: I1211 10:40:28.964671 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-thk86_20680b9d-91a0-4194-a35b-a25c14869938/frr/0.log" Dec 11 10:40:41 crc kubenswrapper[4788]: I1211 10:40:41.633774 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:40:41 crc kubenswrapper[4788]: I1211 10:40:41.826664 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:40:41 crc kubenswrapper[4788]: I1211 10:40:41.829105 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:40:41 crc kubenswrapper[4788]: I1211 10:40:41.840030 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.024324 4788 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/pull/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.037214 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/extract/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.059744 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4t82fd_72e89d4d-8a18-4508-9e63-096103af0b70/util/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.210196 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.406296 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.406678 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.416206 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.540944 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/util/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.570926 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/pull/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.574857 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8bhtdj_d7be2709-1aa9-4107-88d0-ab448f415893/extract/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.724679 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.888342 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.911566 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:40:42 crc kubenswrapper[4788]: I1211 10:40:42.914993 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.079434 4788 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-utilities/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.124031 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/extract-content/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.275279 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.324388 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4shjb_70fa93f1-ffeb-40d1-9df3-284d4ec505fa/registry-server/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.476981 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.509258 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.524351 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.724582 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-content/0.log" Dec 11 10:40:43 crc kubenswrapper[4788]: I1211 10:40:43.744566 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.000402 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.029301 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-qdm6m_3406c148-fa4c-403c-bf11-02f53cf14170/marketplace-operator/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.197267 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-7xsxb_b52edb02-716e-41ca-9bdb-474fbea1e14f/registry-server/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.220558 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.271100 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.281969 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.470535 4788 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.502701 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/extract-content/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.712551 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.717439 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wfsbn_83b2aa4d-369a-413f-a676-46dcb0957ffa/registry-server/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.885946 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.888528 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:40:44 crc kubenswrapper[4788]: I1211 10:40:44.897752 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:40:45 crc kubenswrapper[4788]: I1211 10:40:45.056095 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-utilities/0.log" Dec 11 10:40:45 crc kubenswrapper[4788]: I1211 10:40:45.068103 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/extract-content/0.log" Dec 11 10:40:45 crc kubenswrapper[4788]: I1211 10:40:45.701738 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2ts2m_91bc93e0-4d8b-4b80-a02b-527c1c6e57f3/registry-server/0.log" Dec 11 10:41:21 crc kubenswrapper[4788]: I1211 10:41:21.369757 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:41:21 crc kubenswrapper[4788]: I1211 10:41:21.370164 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:41:51 crc kubenswrapper[4788]: I1211 10:41:51.369682 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:41:51 crc kubenswrapper[4788]: I1211 10:41:51.370292 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" 
podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.679265 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:41:52 crc kubenswrapper[4788]: E1211 10:41:52.680167 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="extract-content" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.680184 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="extract-content" Dec 11 10:41:52 crc kubenswrapper[4788]: E1211 10:41:52.680214 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="registry-server" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.680222 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="registry-server" Dec 11 10:41:52 crc kubenswrapper[4788]: E1211 10:41:52.680266 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="extract-utilities" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.680276 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="extract-utilities" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.680587 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="4849cffe-25c8-4e6a-af9d-292bf4d1ed5d" containerName="registry-server" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.682721 4788 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.708480 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.823094 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.823165 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv6vf\" (UniqueName: \"kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.823329 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.925420 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.925480 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv6vf\" (UniqueName: \"kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.925556 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.926006 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.926040 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:52 crc kubenswrapper[4788]: I1211 10:41:52.944955 4788 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zv6vf\" (UniqueName: \"kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf\") pod \"redhat-marketplace-lvfhz\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:53 crc kubenswrapper[4788]: I1211 10:41:53.011322 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:41:53 crc kubenswrapper[4788]: I1211 10:41:53.504438 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:41:54 crc kubenswrapper[4788]: I1211 10:41:54.329652 4788 generic.go:334] "Generic (PLEG): container finished" podID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerID="714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df" exitCode=0 Dec 11 10:41:54 crc kubenswrapper[4788]: I1211 10:41:54.329828 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerDied","Data":"714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df"} Dec 11 10:41:54 crc kubenswrapper[4788]: I1211 10:41:54.329992 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerStarted","Data":"fc3df6d63d8873dd52b36348abf8bfe77d542edf19d9c77e94c93669547a7a70"} Dec 11 10:41:55 crc kubenswrapper[4788]: I1211 10:41:55.339989 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerStarted","Data":"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2"} Dec 11 10:41:56 crc kubenswrapper[4788]: I1211 10:41:56.350723 4788 generic.go:334] "Generic (PLEG): container finished" podID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerID="3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2" exitCode=0 Dec 11 10:41:56 crc kubenswrapper[4788]: I1211 10:41:56.350783 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerDied","Data":"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2"} Dec 11 10:41:57 crc kubenswrapper[4788]: I1211 10:41:57.362606 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerStarted","Data":"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750"} Dec 11 10:41:57 crc kubenswrapper[4788]: I1211 10:41:57.383716 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lvfhz" podStartSLOduration=2.590334999 podStartE2EDuration="5.383690831s" podCreationTimestamp="2025-12-11 10:41:52 +0000 UTC" firstStartedPulling="2025-12-11 10:41:54.331626745 +0000 UTC m=+4844.402406341" lastFinishedPulling="2025-12-11 10:41:57.124982597 +0000 UTC m=+4847.195762173" observedRunningTime="2025-12-11 10:41:57.378945281 +0000 UTC m=+4847.449724877" watchObservedRunningTime="2025-12-11 10:41:57.383690831 +0000 UTC m=+4847.454470417" Dec 11 10:42:03 crc kubenswrapper[4788]: I1211 10:42:03.011703 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:03 crc kubenswrapper[4788]: I1211 10:42:03.012462 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:03 crc kubenswrapper[4788]: I1211 10:42:03.065931 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:03 crc kubenswrapper[4788]: I1211 10:42:03.491145 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:04 crc kubenswrapper[4788]: I1211 10:42:04.454711 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:42:05 crc kubenswrapper[4788]: I1211 10:42:05.456271 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lvfhz" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="registry-server" containerID="cri-o://0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750" gracePeriod=2 Dec 11 10:42:05 crc kubenswrapper[4788]: I1211 10:42:05.907901 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.103195 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv6vf\" (UniqueName: \"kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf\") pod \"c102fa43-0d7a-4f51-a2aa-375ef0937964\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.103575 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content\") pod \"c102fa43-0d7a-4f51-a2aa-375ef0937964\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.103695 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities\") pod \"c102fa43-0d7a-4f51-a2aa-375ef0937964\" (UID: \"c102fa43-0d7a-4f51-a2aa-375ef0937964\") " Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.105124 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities" (OuterVolumeSpecName: "utilities") pod "c102fa43-0d7a-4f51-a2aa-375ef0937964" (UID: "c102fa43-0d7a-4f51-a2aa-375ef0937964"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.114491 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf" (OuterVolumeSpecName: "kube-api-access-zv6vf") pod "c102fa43-0d7a-4f51-a2aa-375ef0937964" (UID: "c102fa43-0d7a-4f51-a2aa-375ef0937964"). InnerVolumeSpecName "kube-api-access-zv6vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.125941 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c102fa43-0d7a-4f51-a2aa-375ef0937964" (UID: "c102fa43-0d7a-4f51-a2aa-375ef0937964"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.205648 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.205690 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv6vf\" (UniqueName: \"kubernetes.io/projected/c102fa43-0d7a-4f51-a2aa-375ef0937964-kube-api-access-zv6vf\") on node \"crc\" DevicePath \"\"" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.205702 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c102fa43-0d7a-4f51-a2aa-375ef0937964-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.468125 4788 generic.go:334] "Generic (PLEG): container finished" podID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerID="0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750" exitCode=0 Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.468170 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerDied","Data":"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750"} Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.468184 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvfhz" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.468202 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvfhz" event={"ID":"c102fa43-0d7a-4f51-a2aa-375ef0937964","Type":"ContainerDied","Data":"fc3df6d63d8873dd52b36348abf8bfe77d542edf19d9c77e94c93669547a7a70"} Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.468239 4788 scope.go:117] "RemoveContainer" containerID="0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.488097 4788 scope.go:117] "RemoveContainer" containerID="3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.530065 4788 scope.go:117] "RemoveContainer" containerID="714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.531678 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.531808 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvfhz"] Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.562152 4788 scope.go:117] "RemoveContainer" containerID="0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750" Dec 11 10:42:06 crc kubenswrapper[4788]: E1211 10:42:06.562594 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750\": container with ID starting with 0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750 not found: ID does not exist" containerID="0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.562635 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750"} err="failed to get container status \"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750\": rpc error: code = NotFound desc = could not find container \"0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750\": container with ID starting with 0d6b5d3583d3f921644382b75c905c848af7385f994d74e41ad80a1249105750 not found: ID does not exist" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.562665 4788 scope.go:117] "RemoveContainer" containerID="3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2" Dec 11 10:42:06 crc kubenswrapper[4788]: E1211 10:42:06.562957 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2\": container with ID starting with 3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2 not found: ID does not exist" containerID="3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.562991 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2"} err="failed to get container status \"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2\": rpc error: code = NotFound desc = could not find 
container \"3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2\": container with ID starting with 3dc3153090b081f5b3683c56582b94c83e14bb7aa19526ffa54b354bb48e9bf2 not found: ID does not exist" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.563014 4788 scope.go:117] "RemoveContainer" containerID="714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df" Dec 11 10:42:06 crc kubenswrapper[4788]: E1211 10:42:06.563275 4788 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df\": container with ID starting with 714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df not found: ID does not exist" containerID="714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df" Dec 11 10:42:06 crc kubenswrapper[4788]: I1211 10:42:06.563300 4788 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df"} err="failed to get container status \"714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df\": rpc error: code = NotFound desc = could not find container \"714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df\": container with ID starting with 714864ebc9172dc348122a58c940816db1cbf4748718f93f92e8ba47f49d83df not found: ID does not exist" Dec 11 10:42:08 crc kubenswrapper[4788]: I1211 10:42:08.509929 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" path="/var/lib/kubelet/pods/c102fa43-0d7a-4f51-a2aa-375ef0937964/volumes" Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.369507 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.370341 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.370399 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.371452 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.371511 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded" gracePeriod=600 Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.631050 4788 
generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded" exitCode=0 Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.631697 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded"} Dec 11 10:42:21 crc kubenswrapper[4788]: I1211 10:42:21.631814 4788 scope.go:117] "RemoveContainer" containerID="83ec8c8604b1f2c2582c0514e2dbf15cd401c6fe9e0f7eecc08d529f982ad89c" Dec 11 10:42:22 crc kubenswrapper[4788]: I1211 10:42:22.643382 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerStarted","Data":"b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"} Dec 11 10:42:32 crc kubenswrapper[4788]: I1211 10:42:32.838807 4788 generic.go:334] "Generic (PLEG): container finished" podID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerID="d645d9ebed8f88ebef41ce8df67711343adefd1e16044883e03d030ef2c7bd0c" exitCode=0 Dec 11 10:42:32 crc kubenswrapper[4788]: I1211 10:42:32.838898 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-74tc8/must-gather-p6r8v" event={"ID":"402d07ff-50bf-4015-b80f-46a55aeac97c","Type":"ContainerDied","Data":"d645d9ebed8f88ebef41ce8df67711343adefd1e16044883e03d030ef2c7bd0c"} Dec 11 10:42:32 crc kubenswrapper[4788]: I1211 10:42:32.839802 4788 scope.go:117] "RemoveContainer" containerID="d645d9ebed8f88ebef41ce8df67711343adefd1e16044883e03d030ef2c7bd0c" Dec 11 10:42:33 crc kubenswrapper[4788]: I1211 10:42:33.352058 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-74tc8_must-gather-p6r8v_402d07ff-50bf-4015-b80f-46a55aeac97c/gather/0.log" Dec 11 10:42:44 crc kubenswrapper[4788]: I1211 10:42:44.684457 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-74tc8/must-gather-p6r8v"] Dec 11 10:42:44 crc kubenswrapper[4788]: I1211 10:42:44.685385 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-74tc8/must-gather-p6r8v" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="copy" containerID="cri-o://9f09cf1fc579d578939220bd2a5423b0928bf3fb66f23a93cd5c5dcd49a99a1e" gracePeriod=2 Dec 11 10:42:44 crc kubenswrapper[4788]: I1211 10:42:44.692418 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-74tc8/must-gather-p6r8v"] Dec 11 10:42:44 crc kubenswrapper[4788]: I1211 10:42:44.965116 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-74tc8_must-gather-p6r8v_402d07ff-50bf-4015-b80f-46a55aeac97c/copy/0.log" Dec 11 10:42:44 crc kubenswrapper[4788]: I1211 10:42:44.965725 4788 generic.go:334] "Generic (PLEG): container finished" podID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerID="9f09cf1fc579d578939220bd2a5423b0928bf3fb66f23a93cd5c5dcd49a99a1e" exitCode=143 Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.187951 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-74tc8_must-gather-p6r8v_402d07ff-50bf-4015-b80f-46a55aeac97c/copy/0.log" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.189248 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.319085 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output\") pod \"402d07ff-50bf-4015-b80f-46a55aeac97c\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.319209 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg4ht\" (UniqueName: \"kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht\") pod \"402d07ff-50bf-4015-b80f-46a55aeac97c\" (UID: \"402d07ff-50bf-4015-b80f-46a55aeac97c\") " Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.325371 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht" (OuterVolumeSpecName: "kube-api-access-lg4ht") pod "402d07ff-50bf-4015-b80f-46a55aeac97c" (UID: "402d07ff-50bf-4015-b80f-46a55aeac97c"). InnerVolumeSpecName "kube-api-access-lg4ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.422413 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg4ht\" (UniqueName: \"kubernetes.io/projected/402d07ff-50bf-4015-b80f-46a55aeac97c-kube-api-access-lg4ht\") on node \"crc\" DevicePath \"\"" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.488439 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "402d07ff-50bf-4015-b80f-46a55aeac97c" (UID: "402d07ff-50bf-4015-b80f-46a55aeac97c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.524281 4788 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/402d07ff-50bf-4015-b80f-46a55aeac97c-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.977616 4788 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-74tc8_must-gather-p6r8v_402d07ff-50bf-4015-b80f-46a55aeac97c/copy/0.log" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.978199 4788 scope.go:117] "RemoveContainer" containerID="9f09cf1fc579d578939220bd2a5423b0928bf3fb66f23a93cd5c5dcd49a99a1e" Dec 11 10:42:45 crc kubenswrapper[4788]: I1211 10:42:45.978247 4788 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-74tc8/must-gather-p6r8v" Dec 11 10:42:46 crc kubenswrapper[4788]: I1211 10:42:46.011215 4788 scope.go:117] "RemoveContainer" containerID="d645d9ebed8f88ebef41ce8df67711343adefd1e16044883e03d030ef2c7bd0c" Dec 11 10:42:46 crc kubenswrapper[4788]: I1211 10:42:46.506836 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" path="/var/lib/kubelet/pods/402d07ff-50bf-4015-b80f-46a55aeac97c/volumes" Dec 11 10:42:58 crc kubenswrapper[4788]: I1211 10:42:58.765362 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-d6d65bd59-g294c" podUID="c00f6c6d-fd4c-40a7-ad5f-ae03b73a30c5" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.085726 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rxgf9"] Dec 11 10:43:38 crc kubenswrapper[4788]: E1211 10:43:38.086723 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="registry-server" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.086742 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="registry-server" Dec 11 10:43:38 crc kubenswrapper[4788]: E1211 10:43:38.086753 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="extract-utilities" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.086760 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="extract-utilities" Dec 11 10:43:38 crc kubenswrapper[4788]: E1211 10:43:38.086776 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="copy" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.086782 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="copy" Dec 11 10:43:38 crc kubenswrapper[4788]: E1211 10:43:38.086806 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="gather" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.086812 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="gather" Dec 11 10:43:38 crc kubenswrapper[4788]: E1211 10:43:38.086828 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="extract-content" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.086836 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="extract-content" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.087058 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="c102fa43-0d7a-4f51-a2aa-375ef0937964" containerName="registry-server" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.087103 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="copy" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.087119 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="402d07ff-50bf-4015-b80f-46a55aeac97c" containerName="gather" Dec 11 10:43:38 crc 
kubenswrapper[4788]: I1211 10:43:38.088852 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.095174 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxgf9"] Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.136997 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.137157 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.137208 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qwmb\" (UniqueName: \"kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.239322 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.239408 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qwmb\" (UniqueName: \"kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.239434 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.240136 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.240252 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 
11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.571162 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qwmb\" (UniqueName: \"kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb\") pod \"community-operators-rxgf9\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") " pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:38 crc kubenswrapper[4788]: I1211 10:43:38.723136 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxgf9" Dec 11 10:43:39 crc kubenswrapper[4788]: I1211 10:43:39.154503 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxgf9"] Dec 11 10:43:39 crc kubenswrapper[4788]: I1211 10:43:39.480847 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerStarted","Data":"113ce93bad80eaad2a42d460a268428027a818e0ea54a113f8c9437c3666edd9"} Dec 11 10:43:40 crc kubenswrapper[4788]: I1211 10:43:40.493787 4788 generic.go:334] "Generic (PLEG): container finished" podID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerID="d59eb59c6b77507c150310bc9c26604fc2538d8ad53aaa412d486e41e4935c22" exitCode=0 Dec 11 10:43:40 crc kubenswrapper[4788]: I1211 10:43:40.494100 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerDied","Data":"d59eb59c6b77507c150310bc9c26604fc2538d8ad53aaa412d486e41e4935c22"} Dec 11 10:43:41 crc kubenswrapper[4788]: I1211 10:43:41.503838 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerStarted","Data":"fc99d76420a86c0277077168271508be95846124ea9e798044b919274cab2dca"} Dec 11 10:43:42 crc kubenswrapper[4788]: I1211 10:43:42.513544 4788 generic.go:334] "Generic (PLEG): container finished" podID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerID="fc99d76420a86c0277077168271508be95846124ea9e798044b919274cab2dca" exitCode=0 Dec 11 10:43:42 crc kubenswrapper[4788]: I1211 10:43:42.513595 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerDied","Data":"fc99d76420a86c0277077168271508be95846124ea9e798044b919274cab2dca"} Dec 11 10:43:44 crc kubenswrapper[4788]: I1211 10:43:44.534972 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerStarted","Data":"4c05cf74f0d4d206660dde154478be9b30b27578b497b0c96f6c309e3edcaad4"} Dec 11 10:43:44 crc kubenswrapper[4788]: I1211 10:43:44.557668 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rxgf9" podStartSLOduration=3.693715296 podStartE2EDuration="6.557645984s" podCreationTimestamp="2025-12-11 10:43:38 +0000 UTC" firstStartedPulling="2025-12-11 10:43:40.496251484 +0000 UTC m=+4950.567031070" lastFinishedPulling="2025-12-11 10:43:43.360182172 +0000 UTC m=+4953.430961758" observedRunningTime="2025-12-11 10:43:44.551423086 +0000 UTC m=+4954.622202682" watchObservedRunningTime="2025-12-11 10:43:44.557645984 +0000 UTC m=+4954.628425590" Dec 11 10:43:48 crc 
kubenswrapper[4788]: I1211 10:43:48.482318 4788 scope.go:117] "RemoveContainer" containerID="21ab95ac37fc52ee42bce14372dbf8e0f02bc927c87b7a594fda4a1e20cbe045"
Dec 11 10:43:48 crc kubenswrapper[4788]: I1211 10:43:48.723322 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:48 crc kubenswrapper[4788]: I1211 10:43:48.723705 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:48 crc kubenswrapper[4788]: I1211 10:43:48.781276 4788 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:49 crc kubenswrapper[4788]: I1211 10:43:49.637131 4788 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:49 crc kubenswrapper[4788]: I1211 10:43:49.689412 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxgf9"]
Dec 11 10:43:51 crc kubenswrapper[4788]: I1211 10:43:51.604419 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rxgf9" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="registry-server" containerID="cri-o://4c05cf74f0d4d206660dde154478be9b30b27578b497b0c96f6c309e3edcaad4" gracePeriod=2
Dec 11 10:43:52 crc kubenswrapper[4788]: I1211 10:43:52.628531 4788 generic.go:334] "Generic (PLEG): container finished" podID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerID="4c05cf74f0d4d206660dde154478be9b30b27578b497b0c96f6c309e3edcaad4" exitCode=0
Dec 11 10:43:52 crc kubenswrapper[4788]: I1211 10:43:52.628631 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerDied","Data":"4c05cf74f0d4d206660dde154478be9b30b27578b497b0c96f6c309e3edcaad4"}
Dec 11 10:43:52 crc kubenswrapper[4788]: I1211 10:43:52.910744 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.029063 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qwmb\" (UniqueName: \"kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb\") pod \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") "
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.029267 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content\") pod \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") "
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.029325 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities\") pod \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\" (UID: \"7b5e238a-4bb2-4704-b5ed-68ed02c87903\") "
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.030489 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities" (OuterVolumeSpecName: "utilities") pod "7b5e238a-4bb2-4704-b5ed-68ed02c87903" (UID: "7b5e238a-4bb2-4704-b5ed-68ed02c87903"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.035672 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb" (OuterVolumeSpecName: "kube-api-access-7qwmb") pod "7b5e238a-4bb2-4704-b5ed-68ed02c87903" (UID: "7b5e238a-4bb2-4704-b5ed-68ed02c87903"). InnerVolumeSpecName "kube-api-access-7qwmb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.086139 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b5e238a-4bb2-4704-b5ed-68ed02c87903" (UID: "7b5e238a-4bb2-4704-b5ed-68ed02c87903"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.131452 4788 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.131491 4788 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b5e238a-4bb2-4704-b5ed-68ed02c87903-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.131503 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qwmb\" (UniqueName: \"kubernetes.io/projected/7b5e238a-4bb2-4704-b5ed-68ed02c87903-kube-api-access-7qwmb\") on node \"crc\" DevicePath \"\""
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.640531 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxgf9" event={"ID":"7b5e238a-4bb2-4704-b5ed-68ed02c87903","Type":"ContainerDied","Data":"113ce93bad80eaad2a42d460a268428027a818e0ea54a113f8c9437c3666edd9"}
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.640594 4788 scope.go:117] "RemoveContainer" containerID="4c05cf74f0d4d206660dde154478be9b30b27578b497b0c96f6c309e3edcaad4"
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.640593 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxgf9"
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.668933 4788 scope.go:117] "RemoveContainer" containerID="fc99d76420a86c0277077168271508be95846124ea9e798044b919274cab2dca"
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.683614 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxgf9"]
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.691351 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rxgf9"]
Dec 11 10:43:53 crc kubenswrapper[4788]: I1211 10:43:53.709562 4788 scope.go:117] "RemoveContainer" containerID="d59eb59c6b77507c150310bc9c26604fc2538d8ad53aaa412d486e41e4935c22"
Dec 11 10:43:54 crc kubenswrapper[4788]: I1211 10:43:54.506694 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" path="/var/lib/kubelet/pods/7b5e238a-4bb2-4704-b5ed-68ed02c87903/volumes"
Dec 11 10:44:21 crc kubenswrapper[4788]: I1211 10:44:21.368767 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:44:21 crc kubenswrapper[4788]: I1211 10:44:21.370347 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:44:51 crc kubenswrapper[4788]: I1211 10:44:51.369800 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:44:51 crc kubenswrapper[4788]: I1211 10:44:51.370253 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.149417 4788 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"]
Dec 11 10:45:00 crc kubenswrapper[4788]: E1211 10:45:00.152792 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="extract-content"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.152822 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="extract-content"
Dec 11 10:45:00 crc kubenswrapper[4788]: E1211 10:45:00.152839 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.152845 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4788]: E1211 10:45:00.152859 4788 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="extract-utilities"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.152873 4788 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="extract-utilities"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.153108 4788 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b5e238a-4bb2-4704-b5ed-68ed02c87903" containerName="registry-server"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.153860 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.159280 4788 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.159521 4788 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.164702 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"]
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.255454 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.255614 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dw25\" (UniqueName: \"kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.255675 4788 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.357185 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dw25\" (UniqueName: \"kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.357293 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.357371 4788 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.358519 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.372966 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.378853 4788 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dw25\" (UniqueName: \"kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25\") pod \"collect-profiles-29424165-tlgkm\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.499220 4788 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:00 crc kubenswrapper[4788]: I1211 10:45:00.944714 4788 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"]
Dec 11 10:45:01 crc kubenswrapper[4788]: I1211 10:45:01.382335 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm" event={"ID":"9296f634-0d1d-49de-abad-cf1e06a55646","Type":"ContainerStarted","Data":"21e627bacbad94ec1672f8117cddba827184a97fd27e0b54061e9577e732cc9e"}
Dec 11 10:45:01 crc kubenswrapper[4788]: I1211 10:45:01.382641 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm" event={"ID":"9296f634-0d1d-49de-abad-cf1e06a55646","Type":"ContainerStarted","Data":"78737413015dc78d596ae6eb62522ef4dd4d2accbf515ead9717e8fceaddb3ec"}
Dec 11 10:45:01 crc kubenswrapper[4788]: I1211 10:45:01.425795 4788 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm" podStartSLOduration=1.425773654 podStartE2EDuration="1.425773654s" podCreationTimestamp="2025-12-11 10:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 10:45:01.417910515 +0000 UTC m=+5031.488690111" watchObservedRunningTime="2025-12-11 10:45:01.425773654 +0000 UTC m=+5031.496553240"
Dec 11 10:45:02 crc kubenswrapper[4788]: I1211 10:45:02.392530 4788 generic.go:334] "Generic (PLEG): container finished" podID="9296f634-0d1d-49de-abad-cf1e06a55646" containerID="21e627bacbad94ec1672f8117cddba827184a97fd27e0b54061e9577e732cc9e" exitCode=0
Dec 11 10:45:02 crc kubenswrapper[4788]: I1211 10:45:02.392586 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm" event={"ID":"9296f634-0d1d-49de-abad-cf1e06a55646","Type":"ContainerDied","Data":"21e627bacbad94ec1672f8117cddba827184a97fd27e0b54061e9577e732cc9e"}
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.052369 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.235427 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dw25\" (UniqueName: \"kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25\") pod \"9296f634-0d1d-49de-abad-cf1e06a55646\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") "
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.235504 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume\") pod \"9296f634-0d1d-49de-abad-cf1e06a55646\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") "
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.235681 4788 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume\") pod \"9296f634-0d1d-49de-abad-cf1e06a55646\" (UID: \"9296f634-0d1d-49de-abad-cf1e06a55646\") "
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.236655 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume" (OuterVolumeSpecName: "config-volume") pod "9296f634-0d1d-49de-abad-cf1e06a55646" (UID: "9296f634-0d1d-49de-abad-cf1e06a55646"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.241303 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9296f634-0d1d-49de-abad-cf1e06a55646" (UID: "9296f634-0d1d-49de-abad-cf1e06a55646"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.246634 4788 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25" (OuterVolumeSpecName: "kube-api-access-8dw25") pod "9296f634-0d1d-49de-abad-cf1e06a55646" (UID: "9296f634-0d1d-49de-abad-cf1e06a55646"). InnerVolumeSpecName "kube-api-access-8dw25". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.338141 4788 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dw25\" (UniqueName: \"kubernetes.io/projected/9296f634-0d1d-49de-abad-cf1e06a55646-kube-api-access-8dw25\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.338181 4788 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9296f634-0d1d-49de-abad-cf1e06a55646-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.338190 4788 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9296f634-0d1d-49de-abad-cf1e06a55646-config-volume\") on node \"crc\" DevicePath \"\""
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.421574 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm" event={"ID":"9296f634-0d1d-49de-abad-cf1e06a55646","Type":"ContainerDied","Data":"78737413015dc78d596ae6eb62522ef4dd4d2accbf515ead9717e8fceaddb3ec"}
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.421974 4788 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78737413015dc78d596ae6eb62522ef4dd4d2accbf515ead9717e8fceaddb3ec"
Dec 11 10:45:04 crc kubenswrapper[4788]: I1211 10:45:04.421681 4788 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424165-tlgkm"
Dec 11 10:45:05 crc kubenswrapper[4788]: I1211 10:45:05.126702 4788 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k"]
Dec 11 10:45:05 crc kubenswrapper[4788]: I1211 10:45:05.134880 4788 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424120-mw57k"]
Dec 11 10:45:06 crc kubenswrapper[4788]: I1211 10:45:06.507754 4788 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ed2403-2cfc-4121-a582-429b1c3443e4" path="/var/lib/kubelet/pods/30ed2403-2cfc-4121-a582-429b1c3443e4/volumes"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.369840 4788 patch_prober.go:28] interesting pod/machine-config-daemon-b5z5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.370357 4788 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.370486 4788 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.371301 4788 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"} pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.371367 4788 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerName="machine-config-daemon" containerID="cri-o://b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f" gracePeriod=600
Dec 11 10:45:21 crc kubenswrapper[4788]: E1211 10:45:21.501375 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0"
Dec 11 10:45:21 crc kubenswrapper[4788]: E1211 10:45:21.520058 4788 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6eaad5db_9692_4fdb_982f_22fd2703e0b0.slice/crio-b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f.scope\": RecentStats: unable to find data in memory cache]"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.578443 4788 generic.go:334] "Generic (PLEG): container finished" podID="6eaad5db-9692-4fdb-982f-22fd2703e0b0" containerID="b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f" exitCode=0
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.578536 4788 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" event={"ID":"6eaad5db-9692-4fdb-982f-22fd2703e0b0","Type":"ContainerDied","Data":"b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"}
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.578783 4788 scope.go:117] "RemoveContainer" containerID="847163806f175cd9a844cf382073ecc2cacc7949ecf2f17154532d9647276ded"
Dec 11 10:45:21 crc kubenswrapper[4788]: I1211 10:45:21.579721 4788 scope.go:117] "RemoveContainer" containerID="b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"
Dec 11 10:45:21 crc kubenswrapper[4788]: E1211 10:45:21.580252 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0"
Dec 11 10:45:35 crc kubenswrapper[4788]: I1211 10:45:35.495987 4788 scope.go:117] "RemoveContainer" containerID="b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"
Dec 11 10:45:35 crc kubenswrapper[4788]: E1211 10:45:35.496947 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0"
Dec 11 10:45:46 crc kubenswrapper[4788]: I1211 10:45:46.495201 4788 scope.go:117] "RemoveContainer" containerID="b66dbbe1008d11ed11a97edefe1f5b3c41d4e287ad57224e416e26f5fe08117f"
Dec 11 10:45:46 crc kubenswrapper[4788]: E1211 10:45:46.495949 4788 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-b5z5h_openshift-machine-config-operator(6eaad5db-9692-4fdb-982f-22fd2703e0b0)\"" pod="openshift-machine-config-operator/machine-config-daemon-b5z5h" podUID="6eaad5db-9692-4fdb-982f-22fd2703e0b0"
Dec 11 10:45:48 crc kubenswrapper[4788]: I1211 10:45:48.571762 4788 scope.go:117] "RemoveContainer" containerID="d28e374c8a0a97bef6392267803537c64cda65d12e55519ff776af414dbc9c6a"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116520346024450 0ustar coreroot
var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116520347017366 5ustar coreroot
var/home/core/zuul-output/artifacts/0000755000175000017500000000000015116506062016507 5ustar corecore
var/home/core/zuul-output/docs/0000755000175000017500000000000015116506062015457 5ustar corecore